Updated DB_Helper by adding firebase methods.
This commit is contained in:
parent
485cc3bbba
commit
c82121d036
1810 changed files with 537281 additions and 1 deletions
62
venv/Lib/site-packages/gcloud/datastore/__init__.py
Normal file
62
venv/Lib/site-packages/gcloud/datastore/__init__.py
Normal file
|
@ -0,0 +1,62 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Shortcut methods for getting set up with Google Cloud Datastore.
|
||||
|
||||
You'll typically use these to get started with the API:
|
||||
|
||||
>>> from gcloud import datastore
|
||||
>>>
|
||||
>>> client = datastore.Client()
|
||||
>>> key = client.key('EntityKind', 1234)
|
||||
>>> entity = datastore.Entity(key)
|
||||
>>> query = client.query(kind='EntityKind')
|
||||
|
||||
The main concepts with this API are:
|
||||
|
||||
- :class:`gcloud.datastore.connection.Connection`
|
||||
which represents a connection between your machine and the Cloud Datastore
|
||||
API.
|
||||
|
||||
- :class:`gcloud.datastore.client.Client`
|
||||
which represents a project (string) and namespace (string) bundled with
|
||||
a connection and has convenience methods for constructing objects with that
|
||||
project / namespace.
|
||||
|
||||
- :class:`gcloud.datastore.entity.Entity`
|
||||
which represents a single entity in the datastore
|
||||
(akin to a row in relational database world).
|
||||
|
||||
- :class:`gcloud.datastore.key.Key`
|
||||
which represents a pointer to a particular entity in the datastore
|
||||
(akin to a unique identifier in relational database world).
|
||||
|
||||
- :class:`gcloud.datastore.query.Query`
|
||||
which represents a lookup or search over the rows in the datastore.
|
||||
|
||||
- :class:`gcloud.datastore.transaction.Transaction`
|
||||
which represents an all-or-none transaction and enables consistency
|
||||
when race conditions may occur.
|
||||
"""
|
||||
|
||||
from gcloud.datastore.batch import Batch
|
||||
from gcloud.datastore.connection import Connection
|
||||
from gcloud.datastore.client import Client
|
||||
from gcloud.datastore.entity import Entity
|
||||
from gcloud.datastore.key import Key
|
||||
from gcloud.datastore.query import Query
|
||||
from gcloud.datastore.transaction import Transaction
|
||||
|
||||
|
||||
SCOPE = Connection.SCOPE
|
|
@ -0,0 +1,15 @@
|
|||
# Copyright 2015 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Generated protobuf modules for Google Cloud Datastore API."""
|
|
@ -0,0 +1,289 @@
|
|||
// Copyright (c) 2015, Google Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.datastore.v1beta3;
|
||||
|
||||
import "google/api/annotations.proto";
|
||||
import "google/datastore/v1beta3/entity.proto";
|
||||
import "google/datastore/v1beta3/query.proto";
|
||||
|
||||
option java_multiple_files = true;
|
||||
option java_outer_classname = "DatastoreProto";
|
||||
option java_package = "com.google.datastore.v1beta3";
|
||||
|
||||
|
||||
// Each RPC normalizes the partition IDs of the keys in its input entities,
|
||||
// and always returns entities with keys with normalized partition IDs.
|
||||
// This applies to all keys and entities, including those in values, except keys
|
||||
// with both an empty path and an empty or unset partition ID. Normalization of
|
||||
// input keys sets the project ID (if not already set) to the project ID from
|
||||
// the request.
|
||||
//
|
||||
service Datastore {
|
||||
// Look up entities by key.
|
||||
rpc Lookup(LookupRequest) returns (LookupResponse) {
|
||||
option (google.api.http) = { post: "/v1beta3/projects/{project_id}:lookup" body: "*" };
|
||||
}
|
||||
|
||||
// Query for entities.
|
||||
rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) {
|
||||
option (google.api.http) = { post: "/v1beta3/projects/{project_id}:runQuery" body: "*" };
|
||||
}
|
||||
|
||||
// Begin a new transaction.
|
||||
rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
|
||||
option (google.api.http) = { post: "/v1beta3/projects/{project_id}:beginTransaction" body: "*" };
|
||||
}
|
||||
|
||||
// Commit a transaction, optionally creating, deleting or modifying some
|
||||
// entities.
|
||||
rpc Commit(CommitRequest) returns (CommitResponse) {
|
||||
option (google.api.http) = { post: "/v1beta3/projects/{project_id}:commit" body: "*" };
|
||||
}
|
||||
|
||||
// Roll back a transaction.
|
||||
rpc Rollback(RollbackRequest) returns (RollbackResponse) {
|
||||
option (google.api.http) = { post: "/v1beta3/projects/{project_id}:rollback" body: "*" };
|
||||
}
|
||||
|
||||
// Allocate IDs for the given keys (useful for referencing an entity before
|
||||
// it is inserted).
|
||||
rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) {
|
||||
option (google.api.http) = { post: "/v1beta3/projects/{project_id}:allocateIds" body: "*" };
|
||||
}
|
||||
}
|
||||
|
||||
// The request for [google.datastore.v1beta3.Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup].
|
||||
message LookupRequest {
|
||||
// Project ID against which to make the request.
|
||||
string project_id = 8;
|
||||
|
||||
// Options for this lookup request.
|
||||
ReadOptions read_options = 1;
|
||||
|
||||
// Keys of entities to look up.
|
||||
repeated Key keys = 3;
|
||||
}
|
||||
|
||||
// The response for [google.datastore.v1beta3.Datastore.Lookup][google.datastore.v1beta3.Datastore.Lookup].
|
||||
message LookupResponse {
|
||||
// Entities found as `ResultType.FULL` entities. The order of results in this
|
||||
// field is undefined and has no relation to the order of the keys in the
|
||||
// input.
|
||||
repeated EntityResult found = 1;
|
||||
|
||||
// Entities not found as `ResultType.KEY_ONLY` entities. The order of results
|
||||
// in this field is undefined and has no relation to the order of the keys
|
||||
// in the input.
|
||||
repeated EntityResult missing = 2;
|
||||
|
||||
// A list of keys that were not looked up due to resource constraints. The
|
||||
// order of results in this field is undefined and has no relation to the
|
||||
// order of the keys in the input.
|
||||
repeated Key deferred = 3;
|
||||
}
|
||||
|
||||
// The request for [google.datastore.v1beta3.Datastore.RunQuery][google.datastore.v1beta3.Datastore.RunQuery].
|
||||
message RunQueryRequest {
|
||||
// Project ID against which to make the request.
|
||||
string project_id = 8;
|
||||
|
||||
// Entities are partitioned into subsets, identified by a partition ID.
|
||||
// Queries are scoped to a single partition.
|
||||
// This partition ID is normalized with the standard default context
|
||||
// partition ID.
|
||||
PartitionId partition_id = 2;
|
||||
|
||||
// The options for this query.
|
||||
ReadOptions read_options = 1;
|
||||
|
||||
// The type of query.
|
||||
oneof query_type {
|
||||
// The query to run.
|
||||
Query query = 3;
|
||||
|
||||
// The GQL query to run.
|
||||
GqlQuery gql_query = 7;
|
||||
}
|
||||
}
|
||||
|
||||
// The response for [google.datastore.v1beta3.Datastore.RunQuery][google.datastore.v1beta3.Datastore.RunQuery].
|
||||
message RunQueryResponse {
|
||||
// A batch of query results (always present).
|
||||
QueryResultBatch batch = 1;
|
||||
|
||||
// The parsed form of the `GqlQuery` from the request, if it was set.
|
||||
Query query = 2;
|
||||
}
|
||||
|
||||
// The request for [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction].
|
||||
message BeginTransactionRequest {
|
||||
// Project ID against which to make the request.
|
||||
string project_id = 8;
|
||||
}
|
||||
|
||||
// The response for [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction].
|
||||
message BeginTransactionResponse {
|
||||
// The transaction identifier (always present).
|
||||
bytes transaction = 1;
|
||||
}
|
||||
|
||||
// The request for [google.datastore.v1beta3.Datastore.Rollback][google.datastore.v1beta3.Datastore.Rollback].
|
||||
message RollbackRequest {
|
||||
// Project ID against which to make the request.
|
||||
string project_id = 8;
|
||||
|
||||
// The transaction identifier, returned by a call to
|
||||
// [google.datastore.v1beta3.Datastore.BeginTransaction][google.datastore.v1beta3.Datastore.BeginTransaction].
|
||||
bytes transaction = 1;
|
||||
}
|
||||
|
||||
// The response for [google.datastore.v1beta3.Datastore.Rollback][google.datastore.v1beta3.Datastore.Rollback]
|
||||
// (an empty message).
|
||||
message RollbackResponse {
|
||||
|
||||
}
|
||||
|
||||
// The request for [google.datastore.v1beta3.Datastore.Commit][google.datastore.v1beta3.Datastore.Commit].
|
||||
message CommitRequest {
|
||||
// Commit modes.
|
||||
enum Mode {
|
||||
// Unspecified.
|
||||
MODE_UNSPECIFIED = 0;
|
||||
|
||||
// Transactional.
|
||||
TRANSACTIONAL = 1;
|
||||
|
||||
// Non-transactional.
|
||||
NON_TRANSACTIONAL = 2;
|
||||
}
|
||||
|
||||
// Project ID against which to make the request.
|
||||
string project_id = 8;
|
||||
|
||||
// The type of commit to perform. Defaults to `TRANSACTIONAL`.
|
||||
Mode mode = 5;
|
||||
|
||||
// Must be set when mode is `TRANSACTIONAL`.
|
||||
oneof transaction_selector {
|
||||
// The transaction in which to write.
|
||||
bytes transaction = 1;
|
||||
}
|
||||
|
||||
// The mutations to perform.
|
||||
//
|
||||
// When mode is `TRANSACTIONAL`, mutations affecting a single entity are
|
||||
// applied in order. The following sequences of mutations affecting a single
|
||||
// entity are not permitted in a single `Commit` request:
|
||||
// - `insert` followed by `insert`
|
||||
// - `update` followed by `insert`
|
||||
// - `upsert` followed by `insert`
|
||||
// - `delete` followed by `update`
|
||||
//
|
||||
// When mode is `NON_TRANSACTIONAL`, no two mutations may affect a single
|
||||
// entity.
|
||||
repeated Mutation mutations = 6;
|
||||
}
|
||||
|
||||
// The response for [google.datastore.v1beta3.Datastore.Commit][google.datastore.v1beta3.Datastore.Commit].
|
||||
message CommitResponse {
|
||||
// The result of performing the mutations.
|
||||
// The i-th mutation result corresponds to the i-th mutation in the request.
|
||||
repeated MutationResult mutation_results = 3;
|
||||
|
||||
// The number of index entries updated during the commit.
|
||||
int32 index_updates = 4;
|
||||
}
|
||||
|
||||
// The request for [google.datastore.v1beta3.Datastore.AllocateIds][google.datastore.v1beta3.Datastore.AllocateIds].
|
||||
message AllocateIdsRequest {
|
||||
// Project ID against which to make the request.
|
||||
string project_id = 8;
|
||||
|
||||
// A list of keys with incomplete key paths for which to allocate IDs.
|
||||
// No key may be reserved/read-only.
|
||||
repeated Key keys = 1;
|
||||
}
|
||||
|
||||
// The response for [google.datastore.v1beta3.Datastore.AllocateIds][google.datastore.v1beta3.Datastore.AllocateIds].
|
||||
message AllocateIdsResponse {
|
||||
// The keys specified in the request (in the same order), each with
|
||||
// its key path completed with a newly allocated ID.
|
||||
repeated Key keys = 1;
|
||||
}
|
||||
|
||||
// A mutation to apply to an entity.
|
||||
message Mutation {
|
||||
// The mutation operation.
|
||||
//
|
||||
// For `insert`, `update`, and `upsert`:
|
||||
// - The entity's key must not be reserved/read-only.
|
||||
// - No property in the entity may have a reserved name,
|
||||
// not even a property in an entity in a value.
|
||||
// - No value in the entity may have meaning 18,
|
||||
// not even a value in an entity in another value.
|
||||
oneof operation {
|
||||
// The entity to insert. The entity must not already exist.
|
||||
// The entity's key's final path element may be incomplete.
|
||||
Entity insert = 4;
|
||||
|
||||
// The entity to update. The entity must already exist.
|
||||
// Must have a complete key path.
|
||||
Entity update = 5;
|
||||
|
||||
// The entity to upsert. The entity may or may not already exist.
|
||||
// The entity's key's final path element may be incomplete.
|
||||
Entity upsert = 6;
|
||||
|
||||
// The key of the entity to delete. The entity may or may not already exist.
|
||||
// Must have a complete key path and must not be reserved/read-only.
|
||||
Key delete = 7;
|
||||
}
|
||||
}
|
||||
|
||||
// The result of applying a mutation.
|
||||
message MutationResult {
|
||||
// The automatically allocated key.
|
||||
// Set only when the mutation allocated a key.
|
||||
Key key = 3;
|
||||
}
|
||||
|
||||
// Options shared by read requests.
|
||||
message ReadOptions {
|
||||
// Read consistencies.
|
||||
enum ReadConsistency {
|
||||
// Unspecified.
|
||||
READ_CONSISTENCY_UNSPECIFIED = 0;
|
||||
|
||||
// Strong consistency.
|
||||
STRONG = 1;
|
||||
|
||||
// Eventual consistency.
|
||||
EVENTUAL = 2;
|
||||
}
|
||||
|
||||
// If not specified, lookups and ancestor queries default to
|
||||
// `read_consistency`=`STRONG`, global queries default to
|
||||
// `read_consistency`=`EVENTUAL`.
|
||||
oneof consistency_type {
|
||||
// The non-transactional read consistency to use.
|
||||
// Cannot be set to `STRONG` for global queries.
|
||||
ReadConsistency read_consistency = 1;
|
||||
|
||||
// The transaction in which to read.
|
||||
bytes transaction = 2;
|
||||
}
|
||||
}
|
196
venv/Lib/site-packages/gcloud/datastore/_generated/_entity.proto
Normal file
196
venv/Lib/site-packages/gcloud/datastore/_generated/_entity.proto
Normal file
|
@ -0,0 +1,196 @@
|
|||
// Copyright (c) 2015, Google Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.datastore.v1beta3;
|
||||
|
||||
import "google/api/annotations.proto";
|
||||
import "google/protobuf/struct.proto";
|
||||
import "google/protobuf/timestamp.proto";
|
||||
import "google/type/latlng.proto";
|
||||
|
||||
option java_multiple_files = true;
|
||||
option java_outer_classname = "EntityProto";
|
||||
option java_package = "com.google.datastore.v1beta3";
|
||||
|
||||
|
||||
// A partition ID identifies a grouping of entities. The grouping is always
|
||||
// by project and namespace, however the namespace ID may be empty.
|
||||
//
|
||||
// A partition ID contains several dimensions:
|
||||
// project ID and namespace ID.
|
||||
// Partition dimensions:
|
||||
// - A dimension may be `""`.
|
||||
// - A dimension must be valid UTF-8 bytes.
|
||||
// - A dimension's value must match regex `[A-Za-z\d\.\-_]{1,100}`
|
||||
// If the value of any dimension matches regex `__.*__`, the partition is
|
||||
// reserved/read-only.
|
||||
// A reserved/read-only partition ID is forbidden in certain documented
|
||||
// contexts.
|
||||
//
|
||||
// Foreign partition IDs (in which the project ID does
|
||||
// not match the context project ID ) are discouraged.
|
||||
// Reads and writes of foreign partition IDs may fail if the project is not in an active state.
|
||||
message PartitionId {
|
||||
// Project ID.
|
||||
string project_id = 2;
|
||||
|
||||
// Namespace ID.
|
||||
string namespace_id = 4;
|
||||
}
|
||||
|
||||
// A unique identifier for an entity.
|
||||
// If a key's partition id or any of its path kinds or names are
|
||||
// reserved/read-only, the key is reserved/read-only.
|
||||
// A reserved/read-only key is forbidden in certain documented contexts.
|
||||
message Key {
|
||||
// A (kind, ID/name) pair used to construct a key path.
|
||||
//
|
||||
// If either name nor ID is set, the element is complete.
|
||||
// If neither is set, the element is incomplete.
|
||||
message PathElement {
|
||||
// The kind of the entity.
|
||||
// A kind matching regex `__.*__` is reserved/read-only.
|
||||
// A kind must not contain more than 1500 bytes when UTF-8 encoded.
|
||||
// Cannot be `""`.
|
||||
string kind = 1;
|
||||
|
||||
// The type of id.
|
||||
oneof id_type {
|
||||
// The auto allocated ID of the entity.
|
||||
// Never equal to zero. Values less than zero are discouraged and may not
|
||||
// be supported in the future.
|
||||
int64 id = 2;
|
||||
|
||||
// The name of the entity.
|
||||
// A name matching regex `__.*__` is reserved/read-only.
|
||||
// A name must not be more than 1500 bytes when UTF-8 encoded.
|
||||
// Cannot be `""`.
|
||||
string name = 3;
|
||||
}
|
||||
}
|
||||
|
||||
// Entities are partitioned into subsets, currently identified by a dataset
|
||||
// (usually implicitly specified by the project) and namespace ID.
|
||||
// Queries are scoped to a single partition.
|
||||
PartitionId partition_id = 1;
|
||||
|
||||
// The entity path.
|
||||
// An entity path consists of one or more elements composed of a kind and a
|
||||
// string or numerical identifier, which identify entities. The first
|
||||
// element identifies a _root entity_, the second element identifies
|
||||
// a _child_ of the root entity, the third element a child of the
|
||||
// second entity, and so forth. The entities identified by all prefixes of
|
||||
// the path are called the element's _ancestors_.
|
||||
// An entity path is always fully complete: *all* of the entity's ancestors
|
||||
// are required to be in the path along with the entity identifier itself.
|
||||
// The only exception is that in some documented cases, the identifier in the
|
||||
// last path element (for the entity) itself may be omitted. A path can never
|
||||
// be empty. The path can have at most 100 elements.
|
||||
repeated PathElement path = 2;
|
||||
}
|
||||
|
||||
// An array value.
|
||||
message ArrayValue {
|
||||
// Values in the array.
|
||||
// The order of this array may not be preserved if it contains a mix of
|
||||
// indexed and unindexed values.
|
||||
repeated Value values = 1;
|
||||
}
|
||||
|
||||
// A message that can hold any of the supported value types and associated
|
||||
// metadata.
|
||||
message Value {
|
||||
// Must have a value set.
|
||||
oneof value_type {
|
||||
// A null value.
|
||||
google.protobuf.NullValue null_value = 11;
|
||||
|
||||
// A boolean value.
|
||||
bool boolean_value = 1;
|
||||
|
||||
// An integer value.
|
||||
int64 integer_value = 2;
|
||||
|
||||
// A double value.
|
||||
double double_value = 3;
|
||||
|
||||
// A timestamp value.
|
||||
// When stored in the Datastore, precise only to microseconds;
|
||||
// any additional precision is rounded down.
|
||||
google.protobuf.Timestamp timestamp_value = 10;
|
||||
|
||||
// A key value.
|
||||
Key key_value = 5;
|
||||
|
||||
// A UTF-8 encoded string value.
|
||||
// When `exclude_from_indexes` is false (it is indexed) and meaning is not
|
||||
// 2, may have at most 1500 bytes.
|
||||
// When meaning is 2, may have at most 2083 bytes.
|
||||
// Otherwise, may be set to at least 1,000,000 bytes
|
||||
string string_value = 17;
|
||||
|
||||
// A blob value.
|
||||
// May have at most 1,000,000 bytes.
|
||||
// When `exclude_from_indexes` is false, may have at most 1500 bytes.
|
||||
// In JSON requests, must be base64-encoded.
|
||||
bytes blob_value = 18;
|
||||
|
||||
// A geo point value representing a point on the surface of Earth.
|
||||
google.type.LatLng geo_point_value = 8;
|
||||
|
||||
// An entity value.
|
||||
// May have no key.
|
||||
// May have a key with an incomplete key path.
|
||||
// May have a reserved/read-only key.
|
||||
Entity entity_value = 6;
|
||||
|
||||
// An array value.
|
||||
// Cannot contain another array value.
|
||||
// A `Value` instance that sets field `array_value` must not set fields
|
||||
// `meaning` or `exclude_from_indexes`.
|
||||
ArrayValue array_value = 9;
|
||||
}
|
||||
|
||||
// The `meaning` field should only be populated for backwards compatibility.
|
||||
int32 meaning = 14;
|
||||
|
||||
// If the value should be excluded from all indexes including those defined
|
||||
// explicitly.
|
||||
bool exclude_from_indexes = 19;
|
||||
}
|
||||
|
||||
// An entity.
|
||||
//
|
||||
// An entity is limited to 1 megabyte when stored. That _roughly_
|
||||
// corresponds to a limit of 1 megabyte for the serialized form of this
|
||||
// message.
|
||||
message Entity {
|
||||
// The entity's key.
|
||||
//
|
||||
// An entity must have a key, unless otherwise documented (for example,
|
||||
// an entity in `Value.entity_value` may have no key).
|
||||
// An entity's kind is its key's path's last element's kind,
|
||||
// or null if it has no key.
|
||||
Key key = 1;
|
||||
|
||||
// The entity's properties.
|
||||
// The map's keys are property names.
|
||||
// A property name matching regex `__.*__` is reserved.
|
||||
// A reserved property name is forbidden in certain documented contexts.
|
||||
// The name must not contain more than 500 characters.
|
||||
// The name cannot be `""`.
|
||||
map<string, Value> properties = 3;
|
||||
}
|
281
venv/Lib/site-packages/gcloud/datastore/_generated/_query.proto
Normal file
281
venv/Lib/site-packages/gcloud/datastore/_generated/_query.proto
Normal file
|
@ -0,0 +1,281 @@
|
|||
// Copyright (c) 2015, Google Inc.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package google.datastore.v1beta3;
|
||||
|
||||
import "google/api/annotations.proto";
|
||||
import "google/datastore/v1beta3/entity.proto";
|
||||
import "google/protobuf/wrappers.proto";
|
||||
|
||||
option java_multiple_files = true;
|
||||
option java_outer_classname = "QueryProto";
|
||||
option java_package = "com.google.datastore.v1beta3";
|
||||
|
||||
|
||||
// The result of fetching an entity from the datastore.
|
||||
message EntityResult {
|
||||
// Specifies what data the 'entity' field contains.
|
||||
// A `ResultType` is either implied (for example, in `LookupResponse.found`
|
||||
// from `datastore.proto`, it is always `FULL`) or specified by context (for
|
||||
// example, in message `QueryResultBatch`, field `entity_result_type`
|
||||
// specifies a `ResultType` for all the values in field `entity_results`).
|
||||
enum ResultType {
|
||||
// Unspecified.
|
||||
RESULT_TYPE_UNSPECIFIED = 0;
|
||||
|
||||
// The entire entity.
|
||||
FULL = 1;
|
||||
|
||||
// A projected subset of properties. The entity may have no key. A property
|
||||
// value may have meaning 18.
|
||||
PROJECTION = 2;
|
||||
|
||||
// Only the key.
|
||||
KEY_ONLY = 3;
|
||||
}
|
||||
|
||||
// The resulting entity.
|
||||
Entity entity = 1;
|
||||
|
||||
// A cursor that points to the position after the result entity.
|
||||
// Set only when the `EntityResult` is part of a `QueryResultBatch` message.
|
||||
bytes cursor = 3;
|
||||
}
|
||||
|
||||
// A query.
|
||||
message Query {
|
||||
// The projection to return. Defaults to returning all properties.
|
||||
repeated Projection projection = 2;
|
||||
|
||||
// The kinds to query (if empty, returns entities of all kinds).
|
||||
// Currently at most 1 kind may be specified.
|
||||
repeated KindExpression kind = 3;
|
||||
|
||||
// The filter to apply.
|
||||
Filter filter = 4;
|
||||
|
||||
// The order to apply to the query results (if empty, order is unspecified).
|
||||
repeated PropertyOrder order = 5;
|
||||
|
||||
// The properties to make distinct. The query results will contain the first
|
||||
// result for each distinct combination of values for the given properties
|
||||
// (if empty, all results are returned).
|
||||
repeated PropertyReference distinct_on = 6;
|
||||
|
||||
// A starting point for the query results. Query cursors are
|
||||
// returned in query result batches.
|
||||
bytes start_cursor = 7;
|
||||
|
||||
// An ending point for the query results. Query cursors are
|
||||
// returned in query result batches.
|
||||
bytes end_cursor = 8;
|
||||
|
||||
// The number of results to skip. Applies before limit, but after all other
|
||||
// constraints.
|
||||
// Must be >= 0.
|
||||
int32 offset = 10;
|
||||
|
||||
// The maximum number of results to return. Applies after all other
|
||||
// constraints.
|
||||
// Unspecified is interpreted as no limit.
|
||||
// Must be >= 0.
|
||||
google.protobuf.Int32Value limit = 12;
|
||||
}
|
||||
|
||||
// A representation of a kind.
|
||||
message KindExpression {
|
||||
// The name of the kind.
|
||||
string name = 1;
|
||||
}
|
||||
|
||||
// A reference to a property relative to the kind expressions.
|
||||
message PropertyReference {
|
||||
// The name of the property.
|
||||
string name = 2;
|
||||
}
|
||||
|
||||
// A representation of a property in a projection.
|
||||
message Projection {
|
||||
// The property to project.
|
||||
PropertyReference property = 1;
|
||||
}
|
||||
|
||||
// The desired order for a specific property.
|
||||
message PropertyOrder {
|
||||
// Direction.
|
||||
enum Direction {
|
||||
// Unspecified.
|
||||
DIRECTION_UNSPECIFIED = 0;
|
||||
|
||||
// Ascending.
|
||||
ASCENDING = 1;
|
||||
|
||||
// Descending.
|
||||
DESCENDING = 2;
|
||||
}
|
||||
|
||||
// The property to order by.
|
||||
PropertyReference property = 1;
|
||||
|
||||
// The direction to order by. Defaults to `ASCENDING`.
|
||||
Direction direction = 2;
|
||||
}
|
||||
|
||||
// A holder for any type of filter.
|
||||
message Filter {
|
||||
// The type of filter.
|
||||
oneof filter_type {
|
||||
// A composite filter.
|
||||
CompositeFilter composite_filter = 1;
|
||||
|
||||
// A filter on a property.
|
||||
PropertyFilter property_filter = 2;
|
||||
}
|
||||
}
|
||||
|
||||
// A filter that merges the multiple other filters using the given operator.
|
||||
message CompositeFilter {
|
||||
// Composite filter operator.
|
||||
enum Operator {
|
||||
// Unspecified. This value must not be used.
|
||||
OPERATOR_UNSPECIFIED = 0;
|
||||
|
||||
// And.
|
||||
AND = 1;
|
||||
}
|
||||
|
||||
// The operator for combining multiple filters.
|
||||
Operator op = 1;
|
||||
|
||||
// The list of filters to combine.
|
||||
// Must contain at least one filter.
|
||||
repeated Filter filters = 2;
|
||||
}
|
||||
|
||||
// A filter on a specific property.
|
||||
message PropertyFilter {
|
||||
// Property filter operator.
|
||||
enum Operator {
|
||||
// Unspecified. This value must not be used.
|
||||
OPERATOR_UNSPECIFIED = 0;
|
||||
|
||||
// Less than.
|
||||
LESS_THAN = 1;
|
||||
|
||||
// Less than or equal.
|
||||
LESS_THAN_OR_EQUAL = 2;
|
||||
|
||||
// Greater than.
|
||||
GREATER_THAN = 3;
|
||||
|
||||
// Greater than or equal.
|
||||
GREATER_THAN_OR_EQUAL = 4;
|
||||
|
||||
// Equal.
|
||||
EQUAL = 5;
|
||||
|
||||
// Has ancestor.
|
||||
HAS_ANCESTOR = 11;
|
||||
}
|
||||
|
||||
// The property to filter by.
|
||||
PropertyReference property = 1;
|
||||
|
||||
// The operator to filter by.
|
||||
Operator op = 2;
|
||||
|
||||
// The value to compare the property to.
|
||||
Value value = 3;
|
||||
}
|
||||
|
||||
// A GQL query.
|
||||
message GqlQuery {
|
||||
// A string of the format described
|
||||
// [here](https://developers.google.com/datastore/docs/concepts/gql).
|
||||
string query_string = 1;
|
||||
|
||||
// When false, the query string must not contain any literals and instead
|
||||
// must bind all values. For example,
|
||||
// `SELECT * FROM Kind WHERE a = 'string literal'` is not allowed, while
|
||||
// `SELECT * FROM Kind WHERE a = @value` is.
|
||||
bool allow_literals = 2;
|
||||
|
||||
// For each non-reserved named binding site in the query string,
|
||||
// there must be a named parameter with that name,
|
||||
// but not necessarily the inverse.
|
||||
// Key must match regex `[A-Za-z_$][A-Za-z_$0-9]*`, must not match regex
|
||||
// `__.*__`, and must not be `""`.
|
||||
map<string, GqlQueryParameter> named_bindings = 5;
|
||||
|
||||
// Numbered binding site @1 references the first numbered parameter,
|
||||
// effectively using 1-based indexing, rather than the usual 0.
|
||||
// For each binding site numbered i in `query_string`,
|
||||
// there must be an i-th numbered parameter.
|
||||
// The inverse must also be true.
|
||||
repeated GqlQueryParameter positional_bindings = 4;
|
||||
}
|
||||
|
||||
// A binding parameter for a GQL query.
|
||||
message GqlQueryParameter {
|
||||
// The type of parameter.
|
||||
oneof parameter_type {
|
||||
// Value.
|
||||
Value value = 2;
|
||||
|
||||
// Cursor.
|
||||
bytes cursor = 3;
|
||||
}
|
||||
}
|
||||
|
||||
// A batch of results produced by a query.
message QueryResultBatch {
  // The possible values for the `more_results` field.
  // NOTE: MORE_RESULTS_AFTER_CURSOR (tag 4) is intentionally declared before
  // NO_MORE_RESULTS (tag 3); tag numbers, not declaration order, are
  // significant on the wire.
  enum MoreResultsType {
    // Unspecified. This value is never used.
    MORE_RESULTS_TYPE_UNSPECIFIED = 0;

    // There may be additional batches to fetch from this query.
    NOT_FINISHED = 1;

    // The query is finished, but there may be more results after the limit.
    MORE_RESULTS_AFTER_LIMIT = 2;

    // The query is finished, but there may be more results after the end cursor.
    MORE_RESULTS_AFTER_CURSOR = 4;

    // The query has been exhausted.
    NO_MORE_RESULTS = 3;
  }

  // The number of results skipped, typically because of an offset.
  int32 skipped_results = 6;

  // A cursor that points to the position after the last skipped result.
  // Will be set when `skipped_results` != 0.
  bytes skipped_cursor = 3;

  // The result type for every entity in `entity_results`.
  EntityResult.ResultType entity_result_type = 1;

  // The results for this batch.
  repeated EntityResult entity_results = 2;

  // A cursor that points to the position after the last result in the batch.
  bytes end_cursor = 4;

  // The state of the query after the current batch.
  MoreResultsType more_results = 5;
}
|
|
@ -0,0 +1,279 @@
|
|||
import abc
|
||||
from grpc.beta import implementations as beta_implementations
|
||||
from grpc.early_adopter import implementations as early_adopter_implementations
|
||||
from grpc.framework.alpha import utilities as alpha_utilities
|
||||
from grpc.framework.common import cardinality
|
||||
from grpc.framework.interfaces.face import utilities as face_utilities
|
||||
class EarlyAdopterDatastoreServicer(object):
  """Abstract service-side interface for the v1beta3 Datastore service.

  Subclasses provide the actual behavior for each of the six unary-unary
  RPC methods; each method here is abstract and only raises
  NotImplementedError.
  """
  # Python 2 style metaclass declaration (generated code predates Python 3
  # ``metaclass=`` syntax; has no effect under Python 3).
  __metaclass__ = abc.ABCMeta

  @abc.abstractmethod
  def Lookup(self, request, context):
    """Handler for the Lookup RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def RunQuery(self, request, context):
    """Handler for the RunQuery RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def BeginTransaction(self, request, context):
    """Handler for the BeginTransaction RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def Commit(self, request, context):
    """Handler for the Commit RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def Rollback(self, request, context):
    """Handler for the Rollback RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def AllocateIds(self, request, context):
    """Handler for the AllocateIds RPC."""
    raise NotImplementedError()
|
||||
class EarlyAdopterDatastoreServer(object):
  """Abstract interface for a server hosting the Datastore service.

  Concrete servers implement ``start`` and ``stop``; both are abstract
  here and only raise NotImplementedError.
  """
  # Python 2 style metaclass declaration (no effect under Python 3).
  __metaclass__ = abc.ABCMeta

  @abc.abstractmethod
  def start(self):
    """Begin serving RPCs."""
    raise NotImplementedError()

  @abc.abstractmethod
  def stop(self):
    """Stop serving RPCs."""
    raise NotImplementedError()
|
||||
class EarlyAdopterDatastoreStub(object):
  """Abstract client-side stub interface for the Datastore service.

  Each RPC method additionally carries an ``async`` attribute (set to None
  here; concrete stubs replace it with the asynchronous invocation callable).

  FIX: ``async`` became a reserved keyword in Python 3.7, so the original
  generated form ``Lookup.async = None`` is a SyntaxError on modern Python.
  The attribute is attached with setattr() instead, which keeps the exact
  same attribute name and value while remaining valid syntax.
  """
  # Python 2 style metaclass declaration (no effect under Python 3).
  __metaclass__ = abc.ABCMeta

  @abc.abstractmethod
  def Lookup(self, request):
    """Invoke the Lookup RPC."""
    raise NotImplementedError()
  setattr(Lookup, 'async', None)

  @abc.abstractmethod
  def RunQuery(self, request):
    """Invoke the RunQuery RPC."""
    raise NotImplementedError()
  setattr(RunQuery, 'async', None)

  @abc.abstractmethod
  def BeginTransaction(self, request):
    """Invoke the BeginTransaction RPC."""
    raise NotImplementedError()
  setattr(BeginTransaction, 'async', None)

  @abc.abstractmethod
  def Commit(self, request):
    """Invoke the Commit RPC."""
    raise NotImplementedError()
  setattr(Commit, 'async', None)

  @abc.abstractmethod
  def Rollback(self, request):
    """Invoke the Rollback RPC."""
    raise NotImplementedError()
  setattr(Rollback, 'async', None)

  @abc.abstractmethod
  def AllocateIds(self, request):
    """Invoke the AllocateIds RPC."""
    raise NotImplementedError()
  setattr(AllocateIds, 'async', None)
|
||||
def early_adopter_create_Datastore_server(servicer, port, private_key=None, certificate_chain=None):
  """Create an early-adopter gRPC server hosting the Datastore service.

  Args:
    servicer: An implementation of EarlyAdopterDatastoreServicer.
    port: Port number on which the server will listen.
    private_key: PEM-encoded private key for a secure server, or None.
    certificate_chain: PEM-encoded certificate chain, or None.

  Returns:
    A server object from ``grpc.early_adopter.implementations`` serving
    ``google.datastore.v1beta3.Datastore``.
  """
  # FIX: the protoc plugin emitted twelve identical copies of this import
  # (one per message type); a single import of the generated module suffices.
  import gcloud.datastore._generated.datastore_pb2 as datastore_pb2
  method_service_descriptions = {
    "AllocateIds": alpha_utilities.unary_unary_service_description(
      servicer.AllocateIds,
      datastore_pb2.AllocateIdsRequest.FromString,
      datastore_pb2.AllocateIdsResponse.SerializeToString,
    ),
    "BeginTransaction": alpha_utilities.unary_unary_service_description(
      servicer.BeginTransaction,
      datastore_pb2.BeginTransactionRequest.FromString,
      datastore_pb2.BeginTransactionResponse.SerializeToString,
    ),
    "Commit": alpha_utilities.unary_unary_service_description(
      servicer.Commit,
      datastore_pb2.CommitRequest.FromString,
      datastore_pb2.CommitResponse.SerializeToString,
    ),
    "Lookup": alpha_utilities.unary_unary_service_description(
      servicer.Lookup,
      datastore_pb2.LookupRequest.FromString,
      datastore_pb2.LookupResponse.SerializeToString,
    ),
    "Rollback": alpha_utilities.unary_unary_service_description(
      servicer.Rollback,
      datastore_pb2.RollbackRequest.FromString,
      datastore_pb2.RollbackResponse.SerializeToString,
    ),
    "RunQuery": alpha_utilities.unary_unary_service_description(
      servicer.RunQuery,
      datastore_pb2.RunQueryRequest.FromString,
      datastore_pb2.RunQueryResponse.SerializeToString,
    ),
  }
  return early_adopter_implementations.server("google.datastore.v1beta3.Datastore", method_service_descriptions, port, private_key=private_key, certificate_chain=certificate_chain)
|
||||
def early_adopter_create_Datastore_stub(host, port, metadata_transformer=None, secure=False, root_certificates=None, private_key=None, certificate_chain=None, server_host_override=None):
  """Create an early-adopter gRPC client stub for the Datastore service.

  Args:
    host: Host name of the Datastore server.
    port: Port of the Datastore server.
    metadata_transformer: Optional callable transforming call metadata.
    secure: Whether to use a TLS-secured channel.
    root_certificates: PEM-encoded root certificates, or None.
    private_key: PEM-encoded client private key, or None.
    certificate_chain: PEM-encoded client certificate chain, or None.
    server_host_override: Override for TLS host-name checking, or None.

  Returns:
    A stub object from ``grpc.early_adopter.implementations`` for
    ``google.datastore.v1beta3.Datastore``.
  """
  # FIX: the protoc plugin emitted twelve identical copies of this import
  # (one per message type); a single import of the generated module suffices.
  import gcloud.datastore._generated.datastore_pb2 as datastore_pb2
  method_invocation_descriptions = {
    "AllocateIds": alpha_utilities.unary_unary_invocation_description(
      datastore_pb2.AllocateIdsRequest.SerializeToString,
      datastore_pb2.AllocateIdsResponse.FromString,
    ),
    "BeginTransaction": alpha_utilities.unary_unary_invocation_description(
      datastore_pb2.BeginTransactionRequest.SerializeToString,
      datastore_pb2.BeginTransactionResponse.FromString,
    ),
    "Commit": alpha_utilities.unary_unary_invocation_description(
      datastore_pb2.CommitRequest.SerializeToString,
      datastore_pb2.CommitResponse.FromString,
    ),
    "Lookup": alpha_utilities.unary_unary_invocation_description(
      datastore_pb2.LookupRequest.SerializeToString,
      datastore_pb2.LookupResponse.FromString,
    ),
    "Rollback": alpha_utilities.unary_unary_invocation_description(
      datastore_pb2.RollbackRequest.SerializeToString,
      datastore_pb2.RollbackResponse.FromString,
    ),
    "RunQuery": alpha_utilities.unary_unary_invocation_description(
      datastore_pb2.RunQueryRequest.SerializeToString,
      datastore_pb2.RunQueryResponse.FromString,
    ),
  }
  return early_adopter_implementations.stub("google.datastore.v1beta3.Datastore", method_invocation_descriptions, host, port, metadata_transformer=metadata_transformer, secure=secure, root_certificates=root_certificates, private_key=private_key, certificate_chain=certificate_chain, server_host_override=server_host_override)
|
||||
|
||||
class BetaDatastoreServicer(object):
  """Abstract beta-API service-side interface for the Datastore service.

  Subclasses supply behavior for each unary-unary RPC; every method here
  is abstract and only raises NotImplementedError.
  """
  # Python 2 style metaclass declaration (no effect under Python 3).
  __metaclass__ = abc.ABCMeta

  @abc.abstractmethod
  def Lookup(self, request, context):
    """Handler for the Lookup RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def RunQuery(self, request, context):
    """Handler for the RunQuery RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def BeginTransaction(self, request, context):
    """Handler for the BeginTransaction RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def Commit(self, request, context):
    """Handler for the Commit RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def Rollback(self, request, context):
    """Handler for the Rollback RPC."""
    raise NotImplementedError()

  @abc.abstractmethod
  def AllocateIds(self, request, context):
    """Handler for the AllocateIds RPC."""
    raise NotImplementedError()
|
||||
|
||||
class BetaDatastoreStub(object):
  """The interface to which beta-API Datastore stubs will conform.

  Each RPC method carries a ``future`` attribute (None here; concrete
  stubs replace it with the future-returning invocation callable).
  """
  # Python 2 style metaclass declaration (no effect under Python 3).
  __metaclass__ = abc.ABCMeta

  @abc.abstractmethod
  def Lookup(self, request, timeout):
    """Invoke the Lookup RPC with the given timeout."""
    raise NotImplementedError()
  Lookup.future = None

  @abc.abstractmethod
  def RunQuery(self, request, timeout):
    """Invoke the RunQuery RPC with the given timeout."""
    raise NotImplementedError()
  RunQuery.future = None

  @abc.abstractmethod
  def BeginTransaction(self, request, timeout):
    """Invoke the BeginTransaction RPC with the given timeout."""
    raise NotImplementedError()
  BeginTransaction.future = None

  @abc.abstractmethod
  def Commit(self, request, timeout):
    """Invoke the Commit RPC with the given timeout."""
    raise NotImplementedError()
  Commit.future = None

  @abc.abstractmethod
  def Rollback(self, request, timeout):
    """Invoke the Rollback RPC with the given timeout."""
    raise NotImplementedError()
  Rollback.future = None

  @abc.abstractmethod
  def AllocateIds(self, request, timeout):
    """Invoke the AllocateIds RPC with the given timeout."""
    raise NotImplementedError()
  AllocateIds.future = None
|
||||
|
||||
def beta_create_Datastore_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
  """Create a beta-API gRPC server for the Datastore service.

  Args:
    servicer: An implementation of BetaDatastoreServicer.
    pool: A thread pool to use, or None for the implementation default.
    pool_size: Size of the thread pool to create when ``pool`` is None.
    default_timeout: Default per-RPC timeout, or None.
    maximum_timeout: Maximum per-RPC timeout, or None.

  Returns:
    A ``grpc.beta`` server wired with the Datastore method implementations.
  """
  # FIX: the protoc plugin emitted twelve identical copies of this import
  # (one per message type); a single import of the generated module suffices.
  import gcloud.datastore._generated.datastore_pb2 as datastore_pb2
  request_deserializers = {
    ('google.datastore.v1beta3.Datastore', 'AllocateIds'): datastore_pb2.AllocateIdsRequest.FromString,
    ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): datastore_pb2.BeginTransactionRequest.FromString,
    ('google.datastore.v1beta3.Datastore', 'Commit'): datastore_pb2.CommitRequest.FromString,
    ('google.datastore.v1beta3.Datastore', 'Lookup'): datastore_pb2.LookupRequest.FromString,
    ('google.datastore.v1beta3.Datastore', 'Rollback'): datastore_pb2.RollbackRequest.FromString,
    ('google.datastore.v1beta3.Datastore', 'RunQuery'): datastore_pb2.RunQueryRequest.FromString,
  }
  response_serializers = {
    ('google.datastore.v1beta3.Datastore', 'AllocateIds'): datastore_pb2.AllocateIdsResponse.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): datastore_pb2.BeginTransactionResponse.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'Commit'): datastore_pb2.CommitResponse.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'Lookup'): datastore_pb2.LookupResponse.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'Rollback'): datastore_pb2.RollbackResponse.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'RunQuery'): datastore_pb2.RunQueryResponse.SerializeToString,
  }
  method_implementations = {
    ('google.datastore.v1beta3.Datastore', 'AllocateIds'): face_utilities.unary_unary_inline(servicer.AllocateIds),
    ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): face_utilities.unary_unary_inline(servicer.BeginTransaction),
    ('google.datastore.v1beta3.Datastore', 'Commit'): face_utilities.unary_unary_inline(servicer.Commit),
    ('google.datastore.v1beta3.Datastore', 'Lookup'): face_utilities.unary_unary_inline(servicer.Lookup),
    ('google.datastore.v1beta3.Datastore', 'Rollback'): face_utilities.unary_unary_inline(servicer.Rollback),
    ('google.datastore.v1beta3.Datastore', 'RunQuery'): face_utilities.unary_unary_inline(servicer.RunQuery),
  }
  server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
  return beta_implementations.server(method_implementations, options=server_options)
|
||||
|
||||
def beta_create_Datastore_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
  """Create a beta-API gRPC client stub for the Datastore service.

  Args:
    channel: A ``grpc.beta`` channel to the Datastore server.
    host: Optional host name override for the calls.
    metadata_transformer: Optional callable transforming call metadata.
    pool: A thread pool to use, or None for the implementation default.
    pool_size: Size of the thread pool to create when ``pool`` is None.

  Returns:
    A dynamic stub for ``google.datastore.v1beta3.Datastore``.
  """
  # FIX: the protoc plugin emitted twelve identical copies of this import
  # (one per message type); a single import of the generated module suffices.
  import gcloud.datastore._generated.datastore_pb2 as datastore_pb2
  request_serializers = {
    ('google.datastore.v1beta3.Datastore', 'AllocateIds'): datastore_pb2.AllocateIdsRequest.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): datastore_pb2.BeginTransactionRequest.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'Commit'): datastore_pb2.CommitRequest.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'Lookup'): datastore_pb2.LookupRequest.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'Rollback'): datastore_pb2.RollbackRequest.SerializeToString,
    ('google.datastore.v1beta3.Datastore', 'RunQuery'): datastore_pb2.RunQueryRequest.SerializeToString,
  }
  response_deserializers = {
    ('google.datastore.v1beta3.Datastore', 'AllocateIds'): datastore_pb2.AllocateIdsResponse.FromString,
    ('google.datastore.v1beta3.Datastore', 'BeginTransaction'): datastore_pb2.BeginTransactionResponse.FromString,
    ('google.datastore.v1beta3.Datastore', 'Commit'): datastore_pb2.CommitResponse.FromString,
    ('google.datastore.v1beta3.Datastore', 'Lookup'): datastore_pb2.LookupResponse.FromString,
    ('google.datastore.v1beta3.Datastore', 'Rollback'): datastore_pb2.RollbackResponse.FromString,
    ('google.datastore.v1beta3.Datastore', 'RunQuery'): datastore_pb2.RunQueryResponse.FromString,
  }
  cardinalities = {
    'AllocateIds': cardinality.Cardinality.UNARY_UNARY,
    'BeginTransaction': cardinality.Cardinality.UNARY_UNARY,
    'Commit': cardinality.Cardinality.UNARY_UNARY,
    'Lookup': cardinality.Cardinality.UNARY_UNARY,
    'Rollback': cardinality.Cardinality.UNARY_UNARY,
    'RunQuery': cardinality.Cardinality.UNARY_UNARY,
  }
  stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
  return beta_implementations.dynamic_stub(channel, 'google.datastore.v1beta3.Datastore', cardinalities, options=stub_options)
|
|
@ -0,0 +1,862 @@
|
|||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/datastore/v1beta3/datastore.proto
|
||||
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf import descriptor_pb2
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
|
||||
from gcloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1beta3_dot_entity__pb2
|
||||
from gcloud.datastore._generated import query_pb2 as google_dot_datastore_dot_v1beta3_dot_query__pb2
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='google/datastore/v1beta3/datastore.proto',
|
||||
package='google.datastore.v1beta3',
|
||||
syntax='proto3',
|
||||
serialized_pb=b'\n(google/datastore/v1beta3/datastore.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a%google/datastore/v1beta3/entity.proto\x1a$google/datastore/v1beta3/query.proto\"\x8d\x01\n\rLookupRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12;\n\x0cread_options\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.ReadOptions\x12+\n\x04keys\x18\x03 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xb1\x01\n\x0eLookupResponse\x12\x35\n\x05\x66ound\x18\x01 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12\x37\n\x07missing\x18\x02 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12/\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\x98\x02\n\x0fRunQueryRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12;\n\x0cpartition_id\x18\x02 \x01(\x0b\x32%.google.datastore.v1beta3.PartitionId\x12;\n\x0cread_options\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.ReadOptions\x12\x30\n\x05query\x18\x03 \x01(\x0b\x32\x1f.google.datastore.v1beta3.QueryH\x00\x12\x37\n\tgql_query\x18\x07 \x01(\x0b\x32\".google.datastore.v1beta3.GqlQueryH\x00\x42\x0c\n\nquery_type\"}\n\x10RunQueryResponse\x12\x39\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32*.google.datastore.v1beta3.QueryResultBatch\x12.\n\x05query\x18\x02 \x01(\x0b\x32\x1f.google.datastore.v1beta3.Query\"-\n\x17\x42\x65ginTransactionRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\":\n\x0fRollbackRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"\x12\n\x10RollbackResponse\"\x8d\x02\n\rCommitRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12:\n\x04mode\x18\x05 \x01(\x0e\x32,.google.datastore.v1beta3.CommitRequest.Mode\x12\x15\n\x0btransaction\x18\x01 \x01(\x0cH\x00\x12\x35\n\tmutations\x18\x06 
\x03(\x0b\x32\".google.datastore.v1beta3.Mutation\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\x42\x16\n\x14transaction_selector\"k\n\x0e\x43ommitResponse\x12\x42\n\x10mutation_results\x18\x03 \x03(\x0b\x32(.google.datastore.v1beta3.MutationResult\x12\x15\n\rindex_updates\x18\x04 \x01(\x05\"U\n\x12\x41llocateIdsRequest\x12\x12\n\nproject_id\x18\x08 \x01(\t\x12+\n\x04keys\x18\x01 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"B\n\x13\x41llocateIdsResponse\x12+\n\x04keys\x18\x01 \x03(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xe4\x01\n\x08Mutation\x12\x32\n\x06insert\x18\x04 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12\x32\n\x06update\x18\x05 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12\x32\n\x06upsert\x18\x06 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12/\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x1d.google.datastore.v1beta3.KeyH\x00\x42\x0b\n\toperation\"<\n\x0eMutationResult\x12*\n\x03key\x18\x03 \x01(\x0b\x32\x1d.google.datastore.v1beta3.Key\"\xda\x01\n\x0bReadOptions\x12Q\n\x10read_consistency\x18\x01 \x01(\x0e\x32\x35.google.datastore.v1beta3.ReadOptions.ReadConsistencyH\x00\x12\x15\n\x0btransaction\x18\x02 \x01(\x0cH\x00\"M\n\x0fReadConsistency\x12 
\n\x1cREAD_CONSISTENCY_UNSPECIFIED\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\x42\x12\n\x10\x63onsistency_type2\xb7\x07\n\tDatastore\x12\x8d\x01\n\x06Lookup\x12\'.google.datastore.v1beta3.LookupRequest\x1a(.google.datastore.v1beta3.LookupResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1beta3/projects/{project_id}:lookup:\x01*\x12\x95\x01\n\x08RunQuery\x12).google.datastore.v1beta3.RunQueryRequest\x1a*.google.datastore.v1beta3.RunQueryResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1beta3/projects/{project_id}:runQuery:\x01*\x12\xb5\x01\n\x10\x42\x65ginTransaction\x12\x31.google.datastore.v1beta3.BeginTransactionRequest\x1a\x32.google.datastore.v1beta3.BeginTransactionResponse\":\x82\xd3\xe4\x93\x02\x34\"//v1beta3/projects/{project_id}:beginTransaction:\x01*\x12\x8d\x01\n\x06\x43ommit\x12\'.google.datastore.v1beta3.CommitRequest\x1a(.google.datastore.v1beta3.CommitResponse\"0\x82\xd3\xe4\x93\x02*\"%/v1beta3/projects/{project_id}:commit:\x01*\x12\x95\x01\n\x08Rollback\x12).google.datastore.v1beta3.RollbackRequest\x1a*.google.datastore.v1beta3.RollbackResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1beta3/projects/{project_id}:rollback:\x01*\x12\xa1\x01\n\x0b\x41llocateIds\x12,.google.datastore.v1beta3.AllocateIdsRequest\x1a-.google.datastore.v1beta3.AllocateIdsResponse\"5\x82\xd3\xe4\x93\x02/\"*/v1beta3/projects/{project_id}:allocateIds:\x01*B0\n\x1c\x63om.google.datastore.v1beta3B\x0e\x44\x61tastoreProtoP\x01\x62\x06proto3'
|
||||
,
|
||||
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_entity__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_query__pb2.DESCRIPTOR,])
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
|
||||
|
||||
# Generated enum descriptor for google.datastore.v1beta3.CommitRequest.Mode
# (MODE_UNSPECIFIED=0, TRANSACTIONAL=1, NON_TRANSACTIONAL=2). Do not edit.
_COMMITREQUEST_MODE = _descriptor.EnumDescriptor(
  name='Mode',
  full_name='google.datastore.v1beta3.CommitRequest.Mode',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='MODE_UNSPECIFIED', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='TRANSACTIONAL', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='NON_TRANSACTIONAL', index=2, number=2,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=1263,
  serialized_end=1333,
)
_sym_db.RegisterEnumDescriptor(_COMMITREQUEST_MODE)
|
||||
|
||||
# Generated enum descriptor for google.datastore.v1beta3.ReadOptions.ReadConsistency
# (READ_CONSISTENCY_UNSPECIFIED=0, STRONG=1, EVENTUAL=2). Do not edit.
_READOPTIONS_READCONSISTENCY = _descriptor.EnumDescriptor(
  name='ReadConsistency',
  full_name='google.datastore.v1beta3.ReadOptions.ReadConsistency',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='READ_CONSISTENCY_UNSPECIFIED', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='STRONG', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='EVENTUAL', index=2, number=2,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=2038,
  serialized_end=2115,
)
_sym_db.RegisterEnumDescriptor(_READOPTIONS_READCONSISTENCY)
|
||||
|
||||
|
||||
# Generated message descriptor for google.datastore.v1beta3.LookupRequest
# (fields: project_id=8, read_options=1, keys=3). Do not edit.
_LOOKUPREQUEST = _descriptor.Descriptor(
  name='LookupRequest',
  full_name='google.datastore.v1beta3.LookupRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='project_id', full_name='google.datastore.v1beta3.LookupRequest.project_id', index=0,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='read_options', full_name='google.datastore.v1beta3.LookupRequest.read_options', index=1,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='keys', full_name='google.datastore.v1beta3.LookupRequest.keys', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=178,
  serialized_end=319,
)
|
||||
|
||||
|
||||
# Generated message descriptor for google.datastore.v1beta3.LookupResponse
# (fields: found=1, missing=2, deferred=3; all repeated). Do not edit.
_LOOKUPRESPONSE = _descriptor.Descriptor(
  name='LookupResponse',
  full_name='google.datastore.v1beta3.LookupResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='found', full_name='google.datastore.v1beta3.LookupResponse.found', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='missing', full_name='google.datastore.v1beta3.LookupResponse.missing', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='deferred', full_name='google.datastore.v1beta3.LookupResponse.deferred', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=322,
  serialized_end=499,
)
|
||||
|
||||
|
||||
# Generated message descriptor for google.datastore.v1beta3.RunQueryRequest
# (fields: project_id=8, partition_id=2, read_options=1, query=3, gql_query=7;
# query and gql_query belong to the 'query_type' oneof). Do not edit.
_RUNQUERYREQUEST = _descriptor.Descriptor(
  name='RunQueryRequest',
  full_name='google.datastore.v1beta3.RunQueryRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='project_id', full_name='google.datastore.v1beta3.RunQueryRequest.project_id', index=0,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='partition_id', full_name='google.datastore.v1beta3.RunQueryRequest.partition_id', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='read_options', full_name='google.datastore.v1beta3.RunQueryRequest.read_options', index=2,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='query', full_name='google.datastore.v1beta3.RunQueryRequest.query', index=3,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gql_query', full_name='google.datastore.v1beta3.RunQueryRequest.gql_query', index=4,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
    _descriptor.OneofDescriptor(
      name='query_type', full_name='google.datastore.v1beta3.RunQueryRequest.query_type',
      index=0, containing_type=None, fields=[]),
  ],
  serialized_start=502,
  serialized_end=782,
)
|
||||
|
||||
|
||||
# Generated message descriptor for google.datastore.v1beta3.RunQueryResponse
# (fields: batch=1, query=2). Do not edit.
_RUNQUERYRESPONSE = _descriptor.Descriptor(
  name='RunQueryResponse',
  full_name='google.datastore.v1beta3.RunQueryResponse',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='batch', full_name='google.datastore.v1beta3.RunQueryResponse.batch', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='query', full_name='google.datastore.v1beta3.RunQueryResponse.query', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=784,
  serialized_end=909,
)
|
||||
|
||||
|
||||
# Generated message descriptor for google.datastore.v1beta3.BeginTransactionRequest
# (single field: project_id=8). Do not edit.
_BEGINTRANSACTIONREQUEST = _descriptor.Descriptor(
  name='BeginTransactionRequest',
  full_name='google.datastore.v1beta3.BeginTransactionRequest',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='project_id', full_name='google.datastore.v1beta3.BeginTransactionRequest.project_id', index=0,
      number=8, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=b"".decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=911,
  serialized_end=956,
)
|
||||
|
||||
|
||||
_BEGINTRANSACTIONRESPONSE = _descriptor.Descriptor(
|
||||
name='BeginTransactionResponse',
|
||||
full_name='google.datastore.v1beta3.BeginTransactionResponse',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='transaction', full_name='google.datastore.v1beta3.BeginTransactionResponse.transaction', index=0,
|
||||
number=1, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=958,
|
||||
serialized_end=1005,
|
||||
)
|
||||
|
||||
|
||||
_ROLLBACKREQUEST = _descriptor.Descriptor(
|
||||
name='RollbackRequest',
|
||||
full_name='google.datastore.v1beta3.RollbackRequest',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='project_id', full_name='google.datastore.v1beta3.RollbackRequest.project_id', index=0,
|
||||
number=8, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='transaction', full_name='google.datastore.v1beta3.RollbackRequest.transaction', index=1,
|
||||
number=1, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1007,
|
||||
serialized_end=1065,
|
||||
)
|
||||
|
||||
|
||||
_ROLLBACKRESPONSE = _descriptor.Descriptor(
|
||||
name='RollbackResponse',
|
||||
full_name='google.datastore.v1beta3.RollbackResponse',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1067,
|
||||
serialized_end=1085,
|
||||
)
|
||||
|
||||
|
||||
_COMMITREQUEST = _descriptor.Descriptor(
|
||||
name='CommitRequest',
|
||||
full_name='google.datastore.v1beta3.CommitRequest',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='project_id', full_name='google.datastore.v1beta3.CommitRequest.project_id', index=0,
|
||||
number=8, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='mode', full_name='google.datastore.v1beta3.CommitRequest.mode', index=1,
|
||||
number=5, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='transaction', full_name='google.datastore.v1beta3.CommitRequest.transaction', index=2,
|
||||
number=1, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='mutations', full_name='google.datastore.v1beta3.CommitRequest.mutations', index=3,
|
||||
number=6, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
_COMMITREQUEST_MODE,
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
_descriptor.OneofDescriptor(
|
||||
name='transaction_selector', full_name='google.datastore.v1beta3.CommitRequest.transaction_selector',
|
||||
index=0, containing_type=None, fields=[]),
|
||||
],
|
||||
serialized_start=1088,
|
||||
serialized_end=1357,
|
||||
)
|
||||
|
||||
|
||||
_COMMITRESPONSE = _descriptor.Descriptor(
|
||||
name='CommitResponse',
|
||||
full_name='google.datastore.v1beta3.CommitResponse',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='mutation_results', full_name='google.datastore.v1beta3.CommitResponse.mutation_results', index=0,
|
||||
number=3, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='index_updates', full_name='google.datastore.v1beta3.CommitResponse.index_updates', index=1,
|
||||
number=4, type=5, cpp_type=1, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1359,
|
||||
serialized_end=1466,
|
||||
)
|
||||
|
||||
|
||||
_ALLOCATEIDSREQUEST = _descriptor.Descriptor(
|
||||
name='AllocateIdsRequest',
|
||||
full_name='google.datastore.v1beta3.AllocateIdsRequest',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='project_id', full_name='google.datastore.v1beta3.AllocateIdsRequest.project_id', index=0,
|
||||
number=8, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='keys', full_name='google.datastore.v1beta3.AllocateIdsRequest.keys', index=1,
|
||||
number=1, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1468,
|
||||
serialized_end=1553,
|
||||
)
|
||||
|
||||
|
||||
_ALLOCATEIDSRESPONSE = _descriptor.Descriptor(
|
||||
name='AllocateIdsResponse',
|
||||
full_name='google.datastore.v1beta3.AllocateIdsResponse',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='keys', full_name='google.datastore.v1beta3.AllocateIdsResponse.keys', index=0,
|
||||
number=1, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1555,
|
||||
serialized_end=1621,
|
||||
)
|
||||
|
||||
|
||||
_MUTATION = _descriptor.Descriptor(
|
||||
name='Mutation',
|
||||
full_name='google.datastore.v1beta3.Mutation',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='insert', full_name='google.datastore.v1beta3.Mutation.insert', index=0,
|
||||
number=4, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='update', full_name='google.datastore.v1beta3.Mutation.update', index=1,
|
||||
number=5, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='upsert', full_name='google.datastore.v1beta3.Mutation.upsert', index=2,
|
||||
number=6, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='delete', full_name='google.datastore.v1beta3.Mutation.delete', index=3,
|
||||
number=7, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
_descriptor.OneofDescriptor(
|
||||
name='operation', full_name='google.datastore.v1beta3.Mutation.operation',
|
||||
index=0, containing_type=None, fields=[]),
|
||||
],
|
||||
serialized_start=1624,
|
||||
serialized_end=1852,
|
||||
)
|
||||
|
||||
|
||||
_MUTATIONRESULT = _descriptor.Descriptor(
|
||||
name='MutationResult',
|
||||
full_name='google.datastore.v1beta3.MutationResult',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='key', full_name='google.datastore.v1beta3.MutationResult.key', index=0,
|
||||
number=3, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1854,
|
||||
serialized_end=1914,
|
||||
)
|
||||
|
||||
|
||||
_READOPTIONS = _descriptor.Descriptor(
|
||||
name='ReadOptions',
|
||||
full_name='google.datastore.v1beta3.ReadOptions',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='read_consistency', full_name='google.datastore.v1beta3.ReadOptions.read_consistency', index=0,
|
||||
number=1, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='transaction', full_name='google.datastore.v1beta3.ReadOptions.transaction', index=1,
|
||||
number=2, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
_READOPTIONS_READCONSISTENCY,
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
_descriptor.OneofDescriptor(
|
||||
name='consistency_type', full_name='google.datastore.v1beta3.ReadOptions.consistency_type',
|
||||
index=0, containing_type=None, fields=[]),
|
||||
],
|
||||
serialized_start=1917,
|
||||
serialized_end=2135,
|
||||
)
|
||||
|
||||
_LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS
|
||||
_LOOKUPREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
|
||||
_LOOKUPRESPONSE.fields_by_name['found'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._ENTITYRESULT
|
||||
_LOOKUPRESPONSE.fields_by_name['missing'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._ENTITYRESULT
|
||||
_LOOKUPRESPONSE.fields_by_name['deferred'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
|
||||
_RUNQUERYREQUEST.fields_by_name['partition_id'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._PARTITIONID
|
||||
_RUNQUERYREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS
|
||||
_RUNQUERYREQUEST.fields_by_name['query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERY
|
||||
_RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._GQLQUERY
|
||||
_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append(
|
||||
_RUNQUERYREQUEST.fields_by_name['query'])
|
||||
_RUNQUERYREQUEST.fields_by_name['query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type']
|
||||
_RUNQUERYREQUEST.oneofs_by_name['query_type'].fields.append(
|
||||
_RUNQUERYREQUEST.fields_by_name['gql_query'])
|
||||
_RUNQUERYREQUEST.fields_by_name['gql_query'].containing_oneof = _RUNQUERYREQUEST.oneofs_by_name['query_type']
|
||||
_RUNQUERYRESPONSE.fields_by_name['batch'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERYRESULTBATCH
|
||||
_RUNQUERYRESPONSE.fields_by_name['query'].message_type = google_dot_datastore_dot_v1beta3_dot_query__pb2._QUERY
|
||||
_COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE
|
||||
_COMMITREQUEST.fields_by_name['mutations'].message_type = _MUTATION
|
||||
_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST
|
||||
_COMMITREQUEST.oneofs_by_name['transaction_selector'].fields.append(
|
||||
_COMMITREQUEST.fields_by_name['transaction'])
|
||||
_COMMITREQUEST.fields_by_name['transaction'].containing_oneof = _COMMITREQUEST.oneofs_by_name['transaction_selector']
|
||||
_COMMITRESPONSE.fields_by_name['mutation_results'].message_type = _MUTATIONRESULT
|
||||
_ALLOCATEIDSREQUEST.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
|
||||
_ALLOCATEIDSRESPONSE.fields_by_name['keys'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
|
||||
_MUTATION.fields_by_name['insert'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY
|
||||
_MUTATION.fields_by_name['update'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY
|
||||
_MUTATION.fields_by_name['upsert'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY
|
||||
_MUTATION.fields_by_name['delete'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
|
||||
_MUTATION.oneofs_by_name['operation'].fields.append(
|
||||
_MUTATION.fields_by_name['insert'])
|
||||
_MUTATION.fields_by_name['insert'].containing_oneof = _MUTATION.oneofs_by_name['operation']
|
||||
_MUTATION.oneofs_by_name['operation'].fields.append(
|
||||
_MUTATION.fields_by_name['update'])
|
||||
_MUTATION.fields_by_name['update'].containing_oneof = _MUTATION.oneofs_by_name['operation']
|
||||
_MUTATION.oneofs_by_name['operation'].fields.append(
|
||||
_MUTATION.fields_by_name['upsert'])
|
||||
_MUTATION.fields_by_name['upsert'].containing_oneof = _MUTATION.oneofs_by_name['operation']
|
||||
_MUTATION.oneofs_by_name['operation'].fields.append(
|
||||
_MUTATION.fields_by_name['delete'])
|
||||
_MUTATION.fields_by_name['delete'].containing_oneof = _MUTATION.oneofs_by_name['operation']
|
||||
_MUTATIONRESULT.fields_by_name['key'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._KEY
|
||||
_READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY
|
||||
_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS
|
||||
_READOPTIONS.oneofs_by_name['consistency_type'].fields.append(
|
||||
_READOPTIONS.fields_by_name['read_consistency'])
|
||||
_READOPTIONS.fields_by_name['read_consistency'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type']
|
||||
_READOPTIONS.oneofs_by_name['consistency_type'].fields.append(
|
||||
_READOPTIONS.fields_by_name['transaction'])
|
||||
_READOPTIONS.fields_by_name['transaction'].containing_oneof = _READOPTIONS.oneofs_by_name['consistency_type']
|
||||
DESCRIPTOR.message_types_by_name['LookupRequest'] = _LOOKUPREQUEST
|
||||
DESCRIPTOR.message_types_by_name['LookupResponse'] = _LOOKUPRESPONSE
|
||||
DESCRIPTOR.message_types_by_name['RunQueryRequest'] = _RUNQUERYREQUEST
|
||||
DESCRIPTOR.message_types_by_name['RunQueryResponse'] = _RUNQUERYRESPONSE
|
||||
DESCRIPTOR.message_types_by_name['BeginTransactionRequest'] = _BEGINTRANSACTIONREQUEST
|
||||
DESCRIPTOR.message_types_by_name['BeginTransactionResponse'] = _BEGINTRANSACTIONRESPONSE
|
||||
DESCRIPTOR.message_types_by_name['RollbackRequest'] = _ROLLBACKREQUEST
|
||||
DESCRIPTOR.message_types_by_name['RollbackResponse'] = _ROLLBACKRESPONSE
|
||||
DESCRIPTOR.message_types_by_name['CommitRequest'] = _COMMITREQUEST
|
||||
DESCRIPTOR.message_types_by_name['CommitResponse'] = _COMMITRESPONSE
|
||||
DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST
|
||||
DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE
|
||||
DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION
|
||||
DESCRIPTOR.message_types_by_name['MutationResult'] = _MUTATIONRESULT
|
||||
DESCRIPTOR.message_types_by_name['ReadOptions'] = _READOPTIONS
|
||||
|
||||
LookupRequest = _reflection.GeneratedProtocolMessageType('LookupRequest', (_message.Message,), dict(
|
||||
DESCRIPTOR = _LOOKUPREQUEST,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.LookupRequest)
|
||||
))
|
||||
_sym_db.RegisterMessage(LookupRequest)
|
||||
|
||||
LookupResponse = _reflection.GeneratedProtocolMessageType('LookupResponse', (_message.Message,), dict(
|
||||
DESCRIPTOR = _LOOKUPRESPONSE,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.LookupResponse)
|
||||
))
|
||||
_sym_db.RegisterMessage(LookupResponse)
|
||||
|
||||
RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict(
|
||||
DESCRIPTOR = _RUNQUERYREQUEST,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RunQueryRequest)
|
||||
))
|
||||
_sym_db.RegisterMessage(RunQueryRequest)
|
||||
|
||||
RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict(
|
||||
DESCRIPTOR = _RUNQUERYRESPONSE,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RunQueryResponse)
|
||||
))
|
||||
_sym_db.RegisterMessage(RunQueryResponse)
|
||||
|
||||
BeginTransactionRequest = _reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict(
|
||||
DESCRIPTOR = _BEGINTRANSACTIONREQUEST,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.BeginTransactionRequest)
|
||||
))
|
||||
_sym_db.RegisterMessage(BeginTransactionRequest)
|
||||
|
||||
BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict(
|
||||
DESCRIPTOR = _BEGINTRANSACTIONRESPONSE,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.BeginTransactionResponse)
|
||||
))
|
||||
_sym_db.RegisterMessage(BeginTransactionResponse)
|
||||
|
||||
RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict(
|
||||
DESCRIPTOR = _ROLLBACKREQUEST,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RollbackRequest)
|
||||
))
|
||||
_sym_db.RegisterMessage(RollbackRequest)
|
||||
|
||||
RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict(
|
||||
DESCRIPTOR = _ROLLBACKRESPONSE,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.RollbackResponse)
|
||||
))
|
||||
_sym_db.RegisterMessage(RollbackResponse)
|
||||
|
||||
CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict(
|
||||
DESCRIPTOR = _COMMITREQUEST,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CommitRequest)
|
||||
))
|
||||
_sym_db.RegisterMessage(CommitRequest)
|
||||
|
||||
CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict(
|
||||
DESCRIPTOR = _COMMITRESPONSE,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CommitResponse)
|
||||
))
|
||||
_sym_db.RegisterMessage(CommitResponse)
|
||||
|
||||
AllocateIdsRequest = _reflection.GeneratedProtocolMessageType('AllocateIdsRequest', (_message.Message,), dict(
|
||||
DESCRIPTOR = _ALLOCATEIDSREQUEST,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.AllocateIdsRequest)
|
||||
))
|
||||
_sym_db.RegisterMessage(AllocateIdsRequest)
|
||||
|
||||
AllocateIdsResponse = _reflection.GeneratedProtocolMessageType('AllocateIdsResponse', (_message.Message,), dict(
|
||||
DESCRIPTOR = _ALLOCATEIDSRESPONSE,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.AllocateIdsResponse)
|
||||
))
|
||||
_sym_db.RegisterMessage(AllocateIdsResponse)
|
||||
|
||||
Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict(
|
||||
DESCRIPTOR = _MUTATION,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Mutation)
|
||||
))
|
||||
_sym_db.RegisterMessage(Mutation)
|
||||
|
||||
MutationResult = _reflection.GeneratedProtocolMessageType('MutationResult', (_message.Message,), dict(
|
||||
DESCRIPTOR = _MUTATIONRESULT,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.MutationResult)
|
||||
))
|
||||
_sym_db.RegisterMessage(MutationResult)
|
||||
|
||||
ReadOptions = _reflection.GeneratedProtocolMessageType('ReadOptions', (_message.Message,), dict(
|
||||
DESCRIPTOR = _READOPTIONS,
|
||||
__module__ = 'google.datastore.v1beta3.datastore_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.ReadOptions)
|
||||
))
|
||||
_sym_db.RegisterMessage(ReadOptions)
|
||||
|
||||
|
||||
DESCRIPTOR.has_options = True
|
||||
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\016DatastoreProtoP\001')
|
||||
# @@protoc_insertion_point(module_scope)
|
493
venv/Lib/site-packages/gcloud/datastore/_generated/entity_pb2.py
Normal file
493
venv/Lib/site-packages/gcloud/datastore/_generated/entity_pb2.py
Normal file
|
@ -0,0 +1,493 @@
|
|||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/datastore/v1beta3/entity.proto
|
||||
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf import descriptor_pb2
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
|
||||
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
|
||||
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
|
||||
from google.type import latlng_pb2 as google_dot_type_dot_latlng__pb2
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='google/datastore/v1beta3/entity.proto',
|
||||
package='google.datastore.v1beta3',
|
||||
syntax='proto3',
|
||||
serialized_pb=b'\n%google/datastore/v1beta3/entity.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x18google/type/latlng.proto\"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t\"\xc1\x01\n\x03Key\x12;\n\x0cpartition_id\x18\x01 \x01(\x0b\x32%.google.datastore.v1beta3.PartitionId\x12\x37\n\x04path\x18\x02 \x03(\x0b\x32).google.datastore.v1beta3.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type\"=\n\nArrayValue\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.datastore.v1beta3.Value\"\x80\x04\n\x05Value\x12\x30\n\nnull_value\x18\x0b \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x17\n\rboolean_value\x18\x01 \x01(\x08H\x00\x12\x17\n\rinteger_value\x18\x02 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x03 \x01(\x01H\x00\x12\x35\n\x0ftimestamp_value\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x32\n\tkey_value\x18\x05 \x01(\x0b\x32\x1d.google.datastore.v1beta3.KeyH\x00\x12\x16\n\x0cstring_value\x18\x11 \x01(\tH\x00\x12\x14\n\nblob_value\x18\x12 \x01(\x0cH\x00\x12.\n\x0fgeo_point_value\x18\x08 \x01(\x0b\x32\x13.google.type.LatLngH\x00\x12\x38\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32 .google.datastore.v1beta3.EntityH\x00\x12;\n\x0b\x61rray_value\x18\t \x01(\x0b\x32$.google.datastore.v1beta3.ArrayValueH\x00\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x1c\n\x14\x65xclude_from_indexes\x18\x13 \x01(\x08\x42\x0c\n\nvalue_type\"\xce\x01\n\x06\x45ntity\x12*\n\x03key\x18\x01 \x01(\x0b\x32\x1d.google.datastore.v1beta3.Key\x12\x44\n\nproperties\x18\x03 \x03(\x0b\x32\x30.google.datastore.v1beta3.Entity.PropertiesEntry\x1aR\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12.\n\x05value\x18\x02 
\x01(\x0b\x32\x1f.google.datastore.v1beta3.Value:\x02\x38\x01\x42-\n\x1c\x63om.google.datastore.v1beta3B\x0b\x45ntityProtoP\x01\x62\x06proto3'
|
||||
,
|
||||
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_type_dot_latlng__pb2.DESCRIPTOR,])
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
|
||||
|
||||
|
||||
_PARTITIONID = _descriptor.Descriptor(
|
||||
name='PartitionId',
|
||||
full_name='google.datastore.v1beta3.PartitionId',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='project_id', full_name='google.datastore.v1beta3.PartitionId.project_id', index=0,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='namespace_id', full_name='google.datastore.v1beta3.PartitionId.namespace_id', index=1,
|
||||
number=4, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=186,
|
||||
serialized_end=241,
|
||||
)
|
||||
|
||||
|
||||
_KEY_PATHELEMENT = _descriptor.Descriptor(
|
||||
name='PathElement',
|
||||
full_name='google.datastore.v1beta3.Key.PathElement',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='kind', full_name='google.datastore.v1beta3.Key.PathElement.kind', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='id', full_name='google.datastore.v1beta3.Key.PathElement.id', index=1,
|
||||
number=2, type=3, cpp_type=2, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='google.datastore.v1beta3.Key.PathElement.name', index=2,
|
||||
number=3, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
_descriptor.OneofDescriptor(
|
||||
name='id_type', full_name='google.datastore.v1beta3.Key.PathElement.id_type',
|
||||
index=0, containing_type=None, fields=[]),
|
||||
],
|
||||
serialized_start=369,
|
||||
serialized_end=437,
|
||||
)
|
||||
|
||||
_KEY = _descriptor.Descriptor(
|
||||
name='Key',
|
||||
full_name='google.datastore.v1beta3.Key',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='partition_id', full_name='google.datastore.v1beta3.Key.partition_id', index=0,
|
||||
number=1, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='path', full_name='google.datastore.v1beta3.Key.path', index=1,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[_KEY_PATHELEMENT, ],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=244,
|
||||
serialized_end=437,
|
||||
)
|
||||
|
||||
|
||||
_ARRAYVALUE = _descriptor.Descriptor(
|
||||
name='ArrayValue',
|
||||
full_name='google.datastore.v1beta3.ArrayValue',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='values', full_name='google.datastore.v1beta3.ArrayValue.values', index=0,
|
||||
number=1, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=439,
|
||||
serialized_end=500,
|
||||
)
|
||||
|
||||
|
||||
_VALUE = _descriptor.Descriptor(
|
||||
name='Value',
|
||||
full_name='google.datastore.v1beta3.Value',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='null_value', full_name='google.datastore.v1beta3.Value.null_value', index=0,
|
||||
number=11, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='boolean_value', full_name='google.datastore.v1beta3.Value.boolean_value', index=1,
|
||||
number=1, type=8, cpp_type=7, label=1,
|
||||
has_default_value=False, default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='integer_value', full_name='google.datastore.v1beta3.Value.integer_value', index=2,
|
||||
number=2, type=3, cpp_type=2, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='double_value', full_name='google.datastore.v1beta3.Value.double_value', index=3,
|
||||
number=3, type=1, cpp_type=5, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='timestamp_value', full_name='google.datastore.v1beta3.Value.timestamp_value', index=4,
|
||||
number=10, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='key_value', full_name='google.datastore.v1beta3.Value.key_value', index=5,
|
||||
number=5, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='string_value', full_name='google.datastore.v1beta3.Value.string_value', index=6,
|
||||
number=17, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='blob_value', full_name='google.datastore.v1beta3.Value.blob_value', index=7,
|
||||
number=18, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='geo_point_value', full_name='google.datastore.v1beta3.Value.geo_point_value', index=8,
|
||||
number=8, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='entity_value', full_name='google.datastore.v1beta3.Value.entity_value', index=9,
|
||||
number=6, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='array_value', full_name='google.datastore.v1beta3.Value.array_value', index=10,
|
||||
number=9, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='meaning', full_name='google.datastore.v1beta3.Value.meaning', index=11,
|
||||
number=14, type=5, cpp_type=1, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='exclude_from_indexes', full_name='google.datastore.v1beta3.Value.exclude_from_indexes', index=12,
|
||||
number=19, type=8, cpp_type=7, label=1,
|
||||
has_default_value=False, default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
_descriptor.OneofDescriptor(
|
||||
name='value_type', full_name='google.datastore.v1beta3.Value.value_type',
|
||||
index=0, containing_type=None, fields=[]),
|
||||
],
|
||||
serialized_start=503,
|
||||
serialized_end=1015,
|
||||
)
|
||||
|
||||
|
||||
_ENTITY_PROPERTIESENTRY = _descriptor.Descriptor(
|
||||
name='PropertiesEntry',
|
||||
full_name='google.datastore.v1beta3.Entity.PropertiesEntry',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='key', full_name='google.datastore.v1beta3.Entity.PropertiesEntry.key', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='google.datastore.v1beta3.Entity.PropertiesEntry.value', index=1,
|
||||
number=2, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001'),
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1142,
|
||||
serialized_end=1224,
|
||||
)
|
||||
|
||||
_ENTITY = _descriptor.Descriptor(
|
||||
name='Entity',
|
||||
full_name='google.datastore.v1beta3.Entity',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='key', full_name='google.datastore.v1beta3.Entity.key', index=0,
|
||||
number=1, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='properties', full_name='google.datastore.v1beta3.Entity.properties', index=1,
|
||||
number=3, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[_ENTITY_PROPERTIESENTRY, ],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1018,
|
||||
serialized_end=1224,
|
||||
)
|
||||
|
||||
_KEY_PATHELEMENT.containing_type = _KEY
|
||||
_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append(
|
||||
_KEY_PATHELEMENT.fields_by_name['id'])
|
||||
_KEY_PATHELEMENT.fields_by_name['id'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type']
|
||||
_KEY_PATHELEMENT.oneofs_by_name['id_type'].fields.append(
|
||||
_KEY_PATHELEMENT.fields_by_name['name'])
|
||||
_KEY_PATHELEMENT.fields_by_name['name'].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name['id_type']
|
||||
_KEY.fields_by_name['partition_id'].message_type = _PARTITIONID
|
||||
_KEY.fields_by_name['path'].message_type = _KEY_PATHELEMENT
|
||||
_ARRAYVALUE.fields_by_name['values'].message_type = _VALUE
|
||||
_VALUE.fields_by_name['null_value'].enum_type = google_dot_protobuf_dot_struct__pb2._NULLVALUE
|
||||
_VALUE.fields_by_name['timestamp_value'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
|
||||
_VALUE.fields_by_name['key_value'].message_type = _KEY
|
||||
_VALUE.fields_by_name['geo_point_value'].message_type = google_dot_type_dot_latlng__pb2._LATLNG
|
||||
_VALUE.fields_by_name['entity_value'].message_type = _ENTITY
|
||||
_VALUE.fields_by_name['array_value'].message_type = _ARRAYVALUE
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['null_value'])
|
||||
_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['boolean_value'])
|
||||
_VALUE.fields_by_name['boolean_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['integer_value'])
|
||||
_VALUE.fields_by_name['integer_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['double_value'])
|
||||
_VALUE.fields_by_name['double_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['timestamp_value'])
|
||||
_VALUE.fields_by_name['timestamp_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['key_value'])
|
||||
_VALUE.fields_by_name['key_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['string_value'])
|
||||
_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['blob_value'])
|
||||
_VALUE.fields_by_name['blob_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['geo_point_value'])
|
||||
_VALUE.fields_by_name['geo_point_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['entity_value'])
|
||||
_VALUE.fields_by_name['entity_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_VALUE.oneofs_by_name['value_type'].fields.append(
|
||||
_VALUE.fields_by_name['array_value'])
|
||||
_VALUE.fields_by_name['array_value'].containing_oneof = _VALUE.oneofs_by_name['value_type']
|
||||
_ENTITY_PROPERTIESENTRY.fields_by_name['value'].message_type = _VALUE
|
||||
_ENTITY_PROPERTIESENTRY.containing_type = _ENTITY
|
||||
_ENTITY.fields_by_name['key'].message_type = _KEY
|
||||
_ENTITY.fields_by_name['properties'].message_type = _ENTITY_PROPERTIESENTRY
|
||||
DESCRIPTOR.message_types_by_name['PartitionId'] = _PARTITIONID
|
||||
DESCRIPTOR.message_types_by_name['Key'] = _KEY
|
||||
DESCRIPTOR.message_types_by_name['ArrayValue'] = _ARRAYVALUE
|
||||
DESCRIPTOR.message_types_by_name['Value'] = _VALUE
|
||||
DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY
|
||||
|
||||
PartitionId = _reflection.GeneratedProtocolMessageType('PartitionId', (_message.Message,), dict(
|
||||
DESCRIPTOR = _PARTITIONID,
|
||||
__module__ = 'google.datastore.v1beta3.entity_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PartitionId)
|
||||
))
|
||||
_sym_db.RegisterMessage(PartitionId)
|
||||
|
||||
Key = _reflection.GeneratedProtocolMessageType('Key', (_message.Message,), dict(
|
||||
|
||||
PathElement = _reflection.GeneratedProtocolMessageType('PathElement', (_message.Message,), dict(
|
||||
DESCRIPTOR = _KEY_PATHELEMENT,
|
||||
__module__ = 'google.datastore.v1beta3.entity_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Key.PathElement)
|
||||
))
|
||||
,
|
||||
DESCRIPTOR = _KEY,
|
||||
__module__ = 'google.datastore.v1beta3.entity_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Key)
|
||||
))
|
||||
_sym_db.RegisterMessage(Key)
|
||||
_sym_db.RegisterMessage(Key.PathElement)
|
||||
|
||||
ArrayValue = _reflection.GeneratedProtocolMessageType('ArrayValue', (_message.Message,), dict(
|
||||
DESCRIPTOR = _ARRAYVALUE,
|
||||
__module__ = 'google.datastore.v1beta3.entity_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.ArrayValue)
|
||||
))
|
||||
_sym_db.RegisterMessage(ArrayValue)
|
||||
|
||||
Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict(
|
||||
DESCRIPTOR = _VALUE,
|
||||
__module__ = 'google.datastore.v1beta3.entity_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Value)
|
||||
))
|
||||
_sym_db.RegisterMessage(Value)
|
||||
|
||||
Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict(
|
||||
|
||||
PropertiesEntry = _reflection.GeneratedProtocolMessageType('PropertiesEntry', (_message.Message,), dict(
|
||||
DESCRIPTOR = _ENTITY_PROPERTIESENTRY,
|
||||
__module__ = 'google.datastore.v1beta3.entity_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Entity.PropertiesEntry)
|
||||
))
|
||||
,
|
||||
DESCRIPTOR = _ENTITY,
|
||||
__module__ = 'google.datastore.v1beta3.entity_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Entity)
|
||||
))
|
||||
_sym_db.RegisterMessage(Entity)
|
||||
_sym_db.RegisterMessage(Entity.PropertiesEntry)
|
||||
|
||||
|
||||
DESCRIPTOR.has_options = True
|
||||
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\013EntityProtoP\001')
|
||||
_ENTITY_PROPERTIESENTRY.has_options = True
|
||||
_ENTITY_PROPERTIESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001')
|
||||
# @@protoc_insertion_point(module_scope)
|
917
venv/Lib/site-packages/gcloud/datastore/_generated/query_pb2.py
Normal file
917
venv/Lib/site-packages/gcloud/datastore/_generated/query_pb2.py
Normal file
|
@ -0,0 +1,917 @@
|
|||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/datastore/v1beta3/query.proto
|
||||
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf import descriptor_pb2
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
|
||||
from gcloud.datastore._generated import entity_pb2 as google_dot_datastore_dot_v1beta3_dot_entity__pb2
|
||||
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='google/datastore/v1beta3/query.proto',
|
||||
package='google.datastore.v1beta3',
|
||||
syntax='proto3',
|
||||
serialized_pb=b'\n$google/datastore/v1beta3/query.proto\x12\x18google.datastore.v1beta3\x1a\x1cgoogle/api/annotations.proto\x1a%google/datastore/v1beta3/entity.proto\x1a\x1egoogle/protobuf/wrappers.proto\"\xa3\x01\n\x0c\x45ntityResult\x12\x30\n\x06\x65ntity\x18\x01 \x01(\x0b\x32 .google.datastore.v1beta3.Entity\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"Q\n\nResultType\x12\x1b\n\x17RESULT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\x8b\x03\n\x05Query\x12\x38\n\nprojection\x18\x02 \x03(\x0b\x32$.google.datastore.v1beta3.Projection\x12\x36\n\x04kind\x18\x03 \x03(\x0b\x32(.google.datastore.v1beta3.KindExpression\x12\x30\n\x06\x66ilter\x18\x04 \x01(\x0b\x32 .google.datastore.v1beta3.Filter\x12\x36\n\x05order\x18\x05 \x03(\x0b\x32\'.google.datastore.v1beta3.PropertyOrder\x12@\n\x0b\x64istinct_on\x18\x06 \x03(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x0e\n\x06offset\x18\n \x01(\x05\x12*\n\x05limit\x18\x0c \x01(\x0b\x32\x1b.google.protobuf.Int32Value\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x01(\t\"K\n\nProjection\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\"\xdb\x01\n\rPropertyOrder\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12\x44\n\tdirection\x18\x02 \x01(\x0e\x32\x31.google.datastore.v1beta3.PropertyOrder.Direction\"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\xa3\x01\n\x06\x46ilter\x12\x45\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32).google.datastore.v1beta3.CompositeFilterH\x00\x12\x43\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32(.google.datastore.v1beta3.PropertyFilterH\x00\x42\r\n\x0b\x66ilter_type\"\xb3\x01\n\x0f\x43ompositeFilter\x12>\n\x02op\x18\x01 
\x01(\x0e\x32\x32.google.datastore.v1beta3.CompositeFilter.Operator\x12\x31\n\x07\x66ilters\x18\x02 \x03(\x0b\x32 .google.datastore.v1beta3.Filter\"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\"\xd6\x02\n\x0ePropertyFilter\x12=\n\x08property\x18\x01 \x01(\x0b\x32+.google.datastore.v1beta3.PropertyReference\x12=\n\x02op\x18\x02 \x01(\x0e\x32\x31.google.datastore.v1beta3.PropertyFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.datastore.v1beta3.Value\"\x95\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xb4\x02\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x01(\t\x12\x16\n\x0e\x61llow_literals\x18\x02 \x01(\x08\x12M\n\x0enamed_bindings\x18\x05 \x03(\x0b\x32\x35.google.datastore.v1beta3.GqlQuery.NamedBindingsEntry\x12H\n\x13positional_bindings\x18\x04 \x03(\x0b\x32+.google.datastore.v1beta3.GqlQueryParameter\x1a\x61\n\x12NamedBindingsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12:\n\x05value\x18\x02 \x01(\x0b\x32+.google.datastore.v1beta3.GqlQueryParameter:\x02\x38\x01\"i\n\x11GqlQueryParameter\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32\x1f.google.datastore.v1beta3.ValueH\x00\x12\x10\n\x06\x63ursor\x18\x03 \x01(\x0cH\x00\x42\x10\n\x0eparameter_type\"\xd3\x03\n\x10QueryResultBatch\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\x12\x16\n\x0eskipped_cursor\x18\x03 \x01(\x0c\x12M\n\x12\x65ntity_result_type\x18\x01 \x01(\x0e\x32\x31.google.datastore.v1beta3.EntityResult.ResultType\x12>\n\x0e\x65ntity_results\x18\x02 \x03(\x0b\x32&.google.datastore.v1beta3.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12P\n\x0cmore_results\x18\x05 
\x01(\x0e\x32:.google.datastore.v1beta3.QueryResultBatch.MoreResultsType\"\x98\x01\n\x0fMoreResultsType\x12!\n\x1dMORE_RESULTS_TYPE_UNSPECIFIED\x10\x00\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x1d\n\x19MORE_RESULTS_AFTER_CURSOR\x10\x04\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\x42,\n\x1c\x63om.google.datastore.v1beta3B\nQueryProtoP\x01\x62\x06proto3'
|
||||
,
|
||||
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_datastore_dot_v1beta3_dot_entity__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,])
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
|
||||
|
||||
_ENTITYRESULT_RESULTTYPE = _descriptor.EnumDescriptor(
|
||||
name='ResultType',
|
||||
full_name='google.datastore.v1beta3.EntityResult.ResultType',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
values=[
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='RESULT_TYPE_UNSPECIFIED', index=0, number=0,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='FULL', index=1, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='PROJECTION', index=2, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='KEY_ONLY', index=3, number=3,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
containing_type=None,
|
||||
options=None,
|
||||
serialized_start=250,
|
||||
serialized_end=331,
|
||||
)
|
||||
_sym_db.RegisterEnumDescriptor(_ENTITYRESULT_RESULTTYPE)
|
||||
|
||||
_PROPERTYORDER_DIRECTION = _descriptor.EnumDescriptor(
|
||||
name='Direction',
|
||||
full_name='google.datastore.v1beta3.PropertyOrder.Direction',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
values=[
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='DIRECTION_UNSPECIFIED', index=0, number=0,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='ASCENDING', index=1, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='DESCENDING', index=2, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
containing_type=None,
|
||||
options=None,
|
||||
serialized_start=1026,
|
||||
serialized_end=1095,
|
||||
)
|
||||
_sym_db.RegisterEnumDescriptor(_PROPERTYORDER_DIRECTION)
|
||||
|
||||
_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor(
|
||||
name='Operator',
|
||||
full_name='google.datastore.v1beta3.CompositeFilter.Operator',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
values=[
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='OPERATOR_UNSPECIFIED', index=0, number=0,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='AND', index=1, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
containing_type=None,
|
||||
options=None,
|
||||
serialized_start=1398,
|
||||
serialized_end=1443,
|
||||
)
|
||||
_sym_db.RegisterEnumDescriptor(_COMPOSITEFILTER_OPERATOR)
|
||||
|
||||
_PROPERTYFILTER_OPERATOR = _descriptor.EnumDescriptor(
|
||||
name='Operator',
|
||||
full_name='google.datastore.v1beta3.PropertyFilter.Operator',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
values=[
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='OPERATOR_UNSPECIFIED', index=0, number=0,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='LESS_THAN', index=1, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='LESS_THAN_OR_EQUAL', index=2, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='GREATER_THAN', index=3, number=3,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='GREATER_THAN_OR_EQUAL', index=4, number=4,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='EQUAL', index=5, number=5,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='HAS_ANCESTOR', index=6, number=11,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
containing_type=None,
|
||||
options=None,
|
||||
serialized_start=1639,
|
||||
serialized_end=1788,
|
||||
)
|
||||
_sym_db.RegisterEnumDescriptor(_PROPERTYFILTER_OPERATOR)
|
||||
|
||||
_QUERYRESULTBATCH_MORERESULTSTYPE = _descriptor.EnumDescriptor(
|
||||
name='MoreResultsType',
|
||||
full_name='google.datastore.v1beta3.QueryResultBatch.MoreResultsType',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
values=[
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='MORE_RESULTS_TYPE_UNSPECIFIED', index=0, number=0,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='NOT_FINISHED', index=1, number=1,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='MORE_RESULTS_AFTER_LIMIT', index=2, number=2,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='MORE_RESULTS_AFTER_CURSOR', index=3, number=4,
|
||||
options=None,
|
||||
type=None),
|
||||
_descriptor.EnumValueDescriptor(
|
||||
name='NO_MORE_RESULTS', index=4, number=3,
|
||||
options=None,
|
||||
type=None),
|
||||
],
|
||||
containing_type=None,
|
||||
options=None,
|
||||
serialized_start=2524,
|
||||
serialized_end=2676,
|
||||
)
|
||||
_sym_db.RegisterEnumDescriptor(_QUERYRESULTBATCH_MORERESULTSTYPE)
|
||||
|
||||
|
||||
_ENTITYRESULT = _descriptor.Descriptor(
|
||||
name='EntityResult',
|
||||
full_name='google.datastore.v1beta3.EntityResult',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='entity', full_name='google.datastore.v1beta3.EntityResult.entity', index=0,
|
||||
number=1, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='cursor', full_name='google.datastore.v1beta3.EntityResult.cursor', index=1,
|
||||
number=3, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
_ENTITYRESULT_RESULTTYPE,
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=168,
|
||||
serialized_end=331,
|
||||
)
|
||||
|
||||
|
||||
_QUERY = _descriptor.Descriptor(
|
||||
name='Query',
|
||||
full_name='google.datastore.v1beta3.Query',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='projection', full_name='google.datastore.v1beta3.Query.projection', index=0,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='kind', full_name='google.datastore.v1beta3.Query.kind', index=1,
|
||||
number=3, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='filter', full_name='google.datastore.v1beta3.Query.filter', index=2,
|
||||
number=4, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='order', full_name='google.datastore.v1beta3.Query.order', index=3,
|
||||
number=5, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='distinct_on', full_name='google.datastore.v1beta3.Query.distinct_on', index=4,
|
||||
number=6, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='start_cursor', full_name='google.datastore.v1beta3.Query.start_cursor', index=5,
|
||||
number=7, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='end_cursor', full_name='google.datastore.v1beta3.Query.end_cursor', index=6,
|
||||
number=8, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='offset', full_name='google.datastore.v1beta3.Query.offset', index=7,
|
||||
number=10, type=5, cpp_type=1, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='limit', full_name='google.datastore.v1beta3.Query.limit', index=8,
|
||||
number=12, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=334,
|
||||
serialized_end=729,
|
||||
)
|
||||
|
||||
|
||||
_KINDEXPRESSION = _descriptor.Descriptor(
|
||||
name='KindExpression',
|
||||
full_name='google.datastore.v1beta3.KindExpression',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='google.datastore.v1beta3.KindExpression.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=731,
|
||||
serialized_end=761,
|
||||
)
|
||||
|
||||
|
||||
_PROPERTYREFERENCE = _descriptor.Descriptor(
|
||||
name='PropertyReference',
|
||||
full_name='google.datastore.v1beta3.PropertyReference',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='google.datastore.v1beta3.PropertyReference.name', index=0,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=763,
|
||||
serialized_end=796,
|
||||
)
|
||||
|
||||
|
||||
_PROJECTION = _descriptor.Descriptor(
|
||||
name='Projection',
|
||||
full_name='google.datastore.v1beta3.Projection',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='property', full_name='google.datastore.v1beta3.Projection.property', index=0,
|
||||
number=1, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=798,
|
||||
serialized_end=873,
|
||||
)
|
||||
|
||||
|
||||
_PROPERTYORDER = _descriptor.Descriptor(
|
||||
name='PropertyOrder',
|
||||
full_name='google.datastore.v1beta3.PropertyOrder',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='property', full_name='google.datastore.v1beta3.PropertyOrder.property', index=0,
|
||||
number=1, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='direction', full_name='google.datastore.v1beta3.PropertyOrder.direction', index=1,
|
||||
number=2, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
_PROPERTYORDER_DIRECTION,
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=876,
|
||||
serialized_end=1095,
|
||||
)
|
||||
|
||||
|
||||
_FILTER = _descriptor.Descriptor(
|
||||
name='Filter',
|
||||
full_name='google.datastore.v1beta3.Filter',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='composite_filter', full_name='google.datastore.v1beta3.Filter.composite_filter', index=0,
|
||||
number=1, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='property_filter', full_name='google.datastore.v1beta3.Filter.property_filter', index=1,
|
||||
number=2, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
_descriptor.OneofDescriptor(
|
||||
name='filter_type', full_name='google.datastore.v1beta3.Filter.filter_type',
|
||||
index=0, containing_type=None, fields=[]),
|
||||
],
|
||||
serialized_start=1098,
|
||||
serialized_end=1261,
|
||||
)
|
||||
|
||||
|
||||
_COMPOSITEFILTER = _descriptor.Descriptor(
|
||||
name='CompositeFilter',
|
||||
full_name='google.datastore.v1beta3.CompositeFilter',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='op', full_name='google.datastore.v1beta3.CompositeFilter.op', index=0,
|
||||
number=1, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='filters', full_name='google.datastore.v1beta3.CompositeFilter.filters', index=1,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
_COMPOSITEFILTER_OPERATOR,
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1264,
|
||||
serialized_end=1443,
|
||||
)
|
||||
|
||||
|
||||
_PROPERTYFILTER = _descriptor.Descriptor(
|
||||
name='PropertyFilter',
|
||||
full_name='google.datastore.v1beta3.PropertyFilter',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='property', full_name='google.datastore.v1beta3.PropertyFilter.property', index=0,
|
||||
number=1, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='op', full_name='google.datastore.v1beta3.PropertyFilter.op', index=1,
|
||||
number=2, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='google.datastore.v1beta3.PropertyFilter.value', index=2,
|
||||
number=3, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
_PROPERTYFILTER_OPERATOR,
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1446,
|
||||
serialized_end=1788,
|
||||
)
|
||||
|
||||
|
||||
_GQLQUERY_NAMEDBINDINGSENTRY = _descriptor.Descriptor(
|
||||
name='NamedBindingsEntry',
|
||||
full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='key', full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry.key', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='google.datastore.v1beta3.GqlQuery.NamedBindingsEntry.value', index=1,
|
||||
number=2, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001'),
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=2002,
|
||||
serialized_end=2099,
|
||||
)
|
||||
|
||||
_GQLQUERY = _descriptor.Descriptor(
|
||||
name='GqlQuery',
|
||||
full_name='google.datastore.v1beta3.GqlQuery',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='query_string', full_name='google.datastore.v1beta3.GqlQuery.query_string', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"".decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='allow_literals', full_name='google.datastore.v1beta3.GqlQuery.allow_literals', index=1,
|
||||
number=2, type=8, cpp_type=7, label=1,
|
||||
has_default_value=False, default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='named_bindings', full_name='google.datastore.v1beta3.GqlQuery.named_bindings', index=2,
|
||||
number=5, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='positional_bindings', full_name='google.datastore.v1beta3.GqlQuery.positional_bindings', index=3,
|
||||
number=4, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[_GQLQUERY_NAMEDBINDINGSENTRY, ],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=1791,
|
||||
serialized_end=2099,
|
||||
)
|
||||
|
||||
|
||||
_GQLQUERYPARAMETER = _descriptor.Descriptor(
|
||||
name='GqlQueryParameter',
|
||||
full_name='google.datastore.v1beta3.GqlQueryParameter',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='google.datastore.v1beta3.GqlQueryParameter.value', index=0,
|
||||
number=2, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='cursor', full_name='google.datastore.v1beta3.GqlQueryParameter.cursor', index=1,
|
||||
number=3, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
_descriptor.OneofDescriptor(
|
||||
name='parameter_type', full_name='google.datastore.v1beta3.GqlQueryParameter.parameter_type',
|
||||
index=0, containing_type=None, fields=[]),
|
||||
],
|
||||
serialized_start=2101,
|
||||
serialized_end=2206,
|
||||
)
|
||||
|
||||
|
||||
_QUERYRESULTBATCH = _descriptor.Descriptor(
|
||||
name='QueryResultBatch',
|
||||
full_name='google.datastore.v1beta3.QueryResultBatch',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='skipped_results', full_name='google.datastore.v1beta3.QueryResultBatch.skipped_results', index=0,
|
||||
number=6, type=5, cpp_type=1, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='skipped_cursor', full_name='google.datastore.v1beta3.QueryResultBatch.skipped_cursor', index=1,
|
||||
number=3, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='entity_result_type', full_name='google.datastore.v1beta3.QueryResultBatch.entity_result_type', index=2,
|
||||
number=1, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='entity_results', full_name='google.datastore.v1beta3.QueryResultBatch.entity_results', index=3,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='end_cursor', full_name='google.datastore.v1beta3.QueryResultBatch.end_cursor', index=4,
|
||||
number=4, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=b"",
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='more_results', full_name='google.datastore.v1beta3.QueryResultBatch.more_results', index=5,
|
||||
number=5, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
_QUERYRESULTBATCH_MORERESULTSTYPE,
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=2209,
|
||||
serialized_end=2676,
|
||||
)
|
||||
|
||||
# ---------------------------------------------------------------------------
# NOTE: machine-generated by the protocol buffer compiler (protoc) from
# google/datastore/v1beta3/query.proto -- do not edit by hand.
# The assignments below wire up cross-references between the descriptors
# defined above (field message types, enum types, oneof membership and
# containing types), then register each top-level message descriptor with the
# file-level DESCRIPTOR.
# ---------------------------------------------------------------------------
_ENTITYRESULT.fields_by_name['entity'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._ENTITY
_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT
_QUERY.fields_by_name['projection'].message_type = _PROJECTION
_QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION
_QUERY.fields_by_name['filter'].message_type = _FILTER
_QUERY.fields_by_name['order'].message_type = _PROPERTYORDER
_QUERY.fields_by_name['distinct_on'].message_type = _PROPERTYREFERENCE
_QUERY.fields_by_name['limit'].message_type = google_dot_protobuf_dot_wrappers__pb2._INT32VALUE
_PROJECTION.fields_by_name['property'].message_type = _PROPERTYREFERENCE
_PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE
_PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION
_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER
_FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER
_FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER
# A Filter is a oneof of composite_filter / property_filter.
_FILTER.oneofs_by_name['filter_type'].fields.append(
  _FILTER.fields_by_name['composite_filter'])
_FILTER.fields_by_name['composite_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type']
_FILTER.oneofs_by_name['filter_type'].fields.append(
  _FILTER.fields_by_name['property_filter'])
_FILTER.fields_by_name['property_filter'].containing_oneof = _FILTER.oneofs_by_name['filter_type']
_COMPOSITEFILTER.fields_by_name['op'].enum_type = _COMPOSITEFILTER_OPERATOR
_COMPOSITEFILTER.fields_by_name['filters'].message_type = _FILTER
_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER
_PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE
_PROPERTYFILTER.fields_by_name['op'].enum_type = _PROPERTYFILTER_OPERATOR
_PROPERTYFILTER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._VALUE
_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER
_GQLQUERY_NAMEDBINDINGSENTRY.fields_by_name['value'].message_type = _GQLQUERYPARAMETER
_GQLQUERY_NAMEDBINDINGSENTRY.containing_type = _GQLQUERY
_GQLQUERY.fields_by_name['named_bindings'].message_type = _GQLQUERY_NAMEDBINDINGSENTRY
_GQLQUERY.fields_by_name['positional_bindings'].message_type = _GQLQUERYPARAMETER
_GQLQUERYPARAMETER.fields_by_name['value'].message_type = google_dot_datastore_dot_v1beta3_dot_entity__pb2._VALUE
# A GqlQueryParameter is a oneof of value / cursor.
_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append(
  _GQLQUERYPARAMETER.fields_by_name['value'])
_GQLQUERYPARAMETER.fields_by_name['value'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type']
_GQLQUERYPARAMETER.oneofs_by_name['parameter_type'].fields.append(
  _GQLQUERYPARAMETER.fields_by_name['cursor'])
_GQLQUERYPARAMETER.fields_by_name['cursor'].containing_oneof = _GQLQUERYPARAMETER.oneofs_by_name['parameter_type']
_QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE
_QUERYRESULTBATCH.fields_by_name['entity_results'].message_type = _ENTITYRESULT
_QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE
_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH
DESCRIPTOR.message_types_by_name['EntityResult'] = _ENTITYRESULT
DESCRIPTOR.message_types_by_name['Query'] = _QUERY
DESCRIPTOR.message_types_by_name['KindExpression'] = _KINDEXPRESSION
DESCRIPTOR.message_types_by_name['PropertyReference'] = _PROPERTYREFERENCE
DESCRIPTOR.message_types_by_name['Projection'] = _PROJECTION
DESCRIPTOR.message_types_by_name['PropertyOrder'] = _PROPERTYORDER
DESCRIPTOR.message_types_by_name['Filter'] = _FILTER
DESCRIPTOR.message_types_by_name['CompositeFilter'] = _COMPOSITEFILTER
DESCRIPTOR.message_types_by_name['PropertyFilter'] = _PROPERTYFILTER
DESCRIPTOR.message_types_by_name['GqlQuery'] = _GQLQUERY
DESCRIPTOR.message_types_by_name['GqlQueryParameter'] = _GQLQUERYPARAMETER
DESCRIPTOR.message_types_by_name['QueryResultBatch'] = _QUERYRESULTBATCH
|
||||
|
||||
# ---------------------------------------------------------------------------
# NOTE: machine-generated by protoc -- do not edit by hand.
# Each statement below materializes a concrete Python message class from its
# descriptor via the protobuf reflection machinery and registers it with the
# default symbol database.
# ---------------------------------------------------------------------------
EntityResult = _reflection.GeneratedProtocolMessageType('EntityResult', (_message.Message,), dict(
  DESCRIPTOR = _ENTITYRESULT,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.EntityResult)
  ))
_sym_db.RegisterMessage(EntityResult)

Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict(
  DESCRIPTOR = _QUERY,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Query)
  ))
_sym_db.RegisterMessage(Query)

KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict(
  DESCRIPTOR = _KINDEXPRESSION,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.KindExpression)
  ))
_sym_db.RegisterMessage(KindExpression)

PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict(
  DESCRIPTOR = _PROPERTYREFERENCE,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyReference)
  ))
_sym_db.RegisterMessage(PropertyReference)

Projection = _reflection.GeneratedProtocolMessageType('Projection', (_message.Message,), dict(
  DESCRIPTOR = _PROJECTION,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Projection)
  ))
_sym_db.RegisterMessage(Projection)

PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict(
  DESCRIPTOR = _PROPERTYORDER,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyOrder)
  ))
_sym_db.RegisterMessage(PropertyOrder)

Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict(
  DESCRIPTOR = _FILTER,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.Filter)
  ))
_sym_db.RegisterMessage(Filter)

CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict(
  DESCRIPTOR = _COMPOSITEFILTER,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.CompositeFilter)
  ))
_sym_db.RegisterMessage(CompositeFilter)

PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict(
  DESCRIPTOR = _PROPERTYFILTER,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.PropertyFilter)
  ))
_sym_db.RegisterMessage(PropertyFilter)

# GqlQuery carries a nested map-entry message for its named_bindings field.
GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict(

  NamedBindingsEntry = _reflection.GeneratedProtocolMessageType('NamedBindingsEntry', (_message.Message,), dict(
    DESCRIPTOR = _GQLQUERY_NAMEDBINDINGSENTRY,
    __module__ = 'google.datastore.v1beta3.query_pb2'
    # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQuery.NamedBindingsEntry)
    ))
  ,
  DESCRIPTOR = _GQLQUERY,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQuery)
  ))
_sym_db.RegisterMessage(GqlQuery)
_sym_db.RegisterMessage(GqlQuery.NamedBindingsEntry)

GqlQueryParameter = _reflection.GeneratedProtocolMessageType('GqlQueryParameter', (_message.Message,), dict(
  DESCRIPTOR = _GQLQUERYPARAMETER,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.GqlQueryParameter)
  ))
_sym_db.RegisterMessage(GqlQueryParameter)

QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict(
  DESCRIPTOR = _QUERYRESULTBATCH,
  __module__ = 'google.datastore.v1beta3.query_pb2'
  # @@protoc_insertion_point(class_scope:google.datastore.v1beta3.QueryResultBatch)
  ))
_sym_db.RegisterMessage(QueryResultBatch)
|
||||
|
||||
|
||||
# Machine-generated by protoc -- do not edit by hand.
# Serialized file / message options (java package name; map_entry=true for the
# NamedBindingsEntry map message).
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), b'\n\034com.google.datastore.v1beta3B\nQueryProtoP\001')
_GQLQUERY_NAMEDBINDINGSENTRY.has_options = True
_GQLQUERY_NAMEDBINDINGSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), b'8\001')
# @@protoc_insertion_point(module_scope)
|
301
venv/Lib/site-packages/gcloud/datastore/batch.py
Normal file
301
venv/Lib/site-packages/gcloud/datastore/batch.py
Normal file
|
@ -0,0 +1,301 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Create / interact with a batch of updates / deletes.
|
||||
|
||||
Batches provide the ability to execute multiple operations
|
||||
in a single request to the Cloud Datastore API.
|
||||
|
||||
See
|
||||
https://cloud.google.com/datastore/docs/concepts/entities#Datastore_Batch_operations
|
||||
"""
|
||||
|
||||
from gcloud.datastore import helpers
|
||||
from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
|
||||
|
||||
|
||||
class Batch(object):
    """An abstraction representing a collected group of updates / deletes.

    Used to build up a bulk mutation.

    For example, the following snippet of code will put the two ``save``
    operations and the ``delete`` operation into the same mutation, and send
    them to the server in a single API request::

      >>> from gcloud import datastore
      >>> client = datastore.Client()
      >>> batch = client.batch()
      >>> batch.put(entity1)
      >>> batch.put(entity2)
      >>> batch.delete(key3)
      >>> batch.commit()

    You can also use a batch as a context manager, in which case
    :meth:`commit` will be called automatically if its block exits without
    raising an exception::

      >>> with batch:
      ...     batch.put(entity1)
      ...     batch.put(entity2)
      ...     batch.delete(key3)

    By default, no updates will be sent if the block exits with an error::

      >>> with batch:
      ...     do_some_work(batch)
      ...     raise Exception()  # rolls back

    :type client: :class:`gcloud.datastore.client.Client`
    :param client: The client used to connect to datastore.
    """

    _id = None  # "protected" attribute, always None for non-transactions

    # Simple integer state machine shared with Transaction:
    # _INITIAL -> _IN_PROGRESS -> (_FINISHED | _ABORTED)
    _INITIAL = 0
    """Enum value for _INITIAL status of batch/transaction."""

    _IN_PROGRESS = 1
    """Enum value for _IN_PROGRESS status of batch/transaction."""

    _ABORTED = 2
    """Enum value for _ABORTED status of batch/transaction."""

    _FINISHED = 3
    """Enum value for _FINISHED status of batch/transaction."""

    def __init__(self, client):
        self._client = client
        self._commit_request = _datastore_pb2.CommitRequest()
        # Entities put() with a partial key; their keys are completed with
        # server-assigned IDs after a successful commit.
        self._partial_key_entities = []
        self._status = self._INITIAL

    def current(self):
        """Return the topmost batch / transaction, or None."""
        return self._client.current_batch

    @property
    def project(self):
        """Getter for project in which the batch will run.

        :rtype: :class:`str`
        :returns: The project in which the batch will run.
        """
        return self._client.project

    @property
    def namespace(self):
        """Getter for namespace in which the batch will run.

        :rtype: :class:`str`
        :returns: The namespace in which the batch will run.
        """
        return self._client.namespace

    @property
    def connection(self):
        """Getter for connection over which the batch will run.

        :rtype: :class:`gcloud.datastore.connection.Connection`
        :returns: The connection over which the batch will run.
        """
        return self._client.connection

    def _add_partial_key_entity_pb(self):
        """Adds a new mutation for an entity with a partial key.

        :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity`
        :returns: The newly created entity protobuf that will be
                  updated and sent with a commit.
        """
        new_mutation = self.mutations.add()
        return new_mutation.insert

    def _add_complete_key_entity_pb(self):
        """Adds a new mutation for an entity with a completed key.

        :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity`
        :returns: The newly created entity protobuf that will be
                  updated and sent with a commit.
        """
        # We use ``upsert`` for entities with completed keys, rather than
        # ``insert`` or ``update``, in order not to create race conditions
        # based on prior existence / removal of the entity.
        new_mutation = self.mutations.add()
        return new_mutation.upsert

    def _add_delete_key_pb(self):
        """Adds a new mutation for a key to be deleted.

        :rtype: :class:`gcloud.datastore._generated.entity_pb2.Key`
        :returns: The newly created key protobuf that will be
                  deleted when sent with a commit.
        """
        new_mutation = self.mutations.add()
        return new_mutation.delete

    @property
    def mutations(self):
        """Getter for the changes accumulated by this batch.

        Every batch is committed with a single commit request containing all
        the work to be done as mutations. Inside a batch, calling :meth:`put`
        with an entity, or :meth:`delete` with a key, builds up the request by
        adding a new mutation. This getter returns the protobuf that has been
        built-up so far.

        :rtype: iterable
        :returns: The list of :class:`._generated.datastore_pb2.Mutation`
                  protobufs to be sent in the commit request.
        """
        return self._commit_request.mutations

    def put(self, entity):
        """Remember an entity's state to be saved during :meth:`commit`.

        .. note::
           Any existing properties for the entity will be replaced by those
           currently set on this instance. Already-stored properties which do
           not correspond to keys set on this instance will be removed from
           the datastore.

        .. note::
           Property values which are "text" ('unicode' in Python2, 'str' in
           Python3) map to 'string_value' in the datastore; values which are
           "bytes" ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

        When an entity has a partial key, calling :meth:`commit` sends it as
        an ``insert`` mutation and the key is completed. On return,
        the key for the ``entity`` passed in is updated to match the key ID
        assigned by the server.

        :type entity: :class:`gcloud.datastore.entity.Entity`
        :param entity: the entity to be saved.

        :raises: ValueError if entity has no key assigned, or if the key's
                 ``project`` does not match ours.
        """
        if entity.key is None:
            raise ValueError("Entity must have a key")

        if self.project != entity.key.project:
            raise ValueError("Key must be from same project as batch")

        if entity.key.is_partial:
            entity_pb = self._add_partial_key_entity_pb()
            # Track the entity so its key can be completed after commit.
            self._partial_key_entities.append(entity)
        else:
            entity_pb = self._add_complete_key_entity_pb()

        _assign_entity_to_pb(entity_pb, entity)

    def delete(self, key):
        """Remember a key to be deleted during :meth:`commit`.

        :type key: :class:`gcloud.datastore.key.Key`
        :param key: the key to be deleted.

        :raises: ValueError if key is not complete, or if the key's
                 ``project`` does not match ours.
        """
        if key.is_partial:
            raise ValueError("Key must be complete")

        if self.project != key.project:
            raise ValueError("Key must be from same project as batch")

        key_pb = key.to_protobuf()
        self._add_delete_key_pb().CopyFrom(key_pb)

    def begin(self):
        """Begins a batch.

        This method is called automatically when entering a with
        statement, however it can be called explicitly if you don't want
        to use a context manager.

        Overridden by :class:`gcloud.datastore.transaction.Transaction`.

        :raises: :class:`ValueError` if the batch has already begun.
        """
        if self._status != self._INITIAL:
            raise ValueError('Batch already started previously.')
        self._status = self._IN_PROGRESS

    def _commit(self):
        """Commits the batch.

        This is called by :meth:`commit`.
        """
        # NOTE: ``self._commit_request`` will be modified.
        _, updated_keys = self.connection.commit(
            self.project, self._commit_request, self._id)
        # If the back-end returns without error, we are guaranteed that
        # :meth:`Connection.commit` will return keys that match (length and
        # order) directly ``_partial_key_entities``.
        for new_key_pb, entity in zip(updated_keys,
                                      self._partial_key_entities):
            new_id = new_key_pb.path[-1].id
            entity.key = entity.key.completed_key(new_id)

    def commit(self):
        """Commits the batch.

        This is called automatically upon exiting a with statement,
        however it can be called explicitly if you don't want to use a
        context manager.
        """
        try:
            self._commit()
        finally:
            # Mark finished even on failure so the batch cannot be reused.
            self._status = self._FINISHED

    def rollback(self):
        """Rolls back the current batch.

        Marks the batch as aborted (can't be used again).

        Overridden by :class:`gcloud.datastore.transaction.Transaction`.
        """
        self._status = self._ABORTED

    def __enter__(self):
        self._client._push_batch(self)
        self.begin()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type is None:
                self.commit()
            else:
                self.rollback()
        finally:
            # Always pop, even if commit/rollback raised.
            self._client._pop_batch()
|
||||
|
||||
|
||||
def _assign_entity_to_pb(entity_pb, entity):
    """Copy ``entity`` into ``entity_pb``.

    Helper method for ``Batch.put``.

    :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
    :param entity_pb: The entity owned by a mutation.

    :type entity: :class:`gcloud.datastore.entity.Entity`
    :param entity: The entity being updated within the batch / transaction.
    """
    bare_entity_pb = helpers.entity_to_protobuf(entity)
    # The original code also did
    # ``bare_entity_pb.key.CopyFrom(bare_entity_pb.key)`` -- copying a
    # message onto itself, which protobuf's ``CopyFrom`` short-circuits as a
    # no-op -- so that dead statement is dropped here.
    entity_pb.CopyFrom(bare_entity_pb)
|
454
venv/Lib/site-packages/gcloud/datastore/client.py
Normal file
454
venv/Lib/site-packages/gcloud/datastore/client.py
Normal file
|
@ -0,0 +1,454 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
"""Convenience wrapper for invoking APIs/factories w/ a project."""
|
||||
|
||||
import os
|
||||
|
||||
from gcloud._helpers import _LocalStack
|
||||
from gcloud._helpers import _determine_default_project as _base_default_project
|
||||
from gcloud.client import _ClientProjectMixin
|
||||
from gcloud.client import Client as _BaseClient
|
||||
from gcloud.datastore import helpers
|
||||
from gcloud.datastore.connection import Connection
|
||||
from gcloud.datastore.batch import Batch
|
||||
from gcloud.datastore.entity import Entity
|
||||
from gcloud.datastore.key import Key
|
||||
from gcloud.datastore.query import Query
|
||||
from gcloud.datastore.transaction import Transaction
|
||||
from gcloud.environment_vars import GCD_DATASET
|
||||
|
||||
|
||||
# Upper bound on lookup retries in _extended_lookup; guards against an
# endless loop if the backend keeps deferring keys.
_MAX_LOOPS = 128
"""Maximum number of iterations to wait for deferred keys."""
|
||||
|
||||
|
||||
def _get_gcd_project():
    """Return the GCD dataset / application ID from the environment, if set.

    Reads the environment variable named by ``GCD_DATASET``; returns ``None``
    when it is not set.
    """
    return os.environ.get(GCD_DATASET)
|
||||
|
||||
|
||||
def _determine_default_project(project=None):
    """Resolve the default project, explicitly or from the environment.

    When ``project`` is not given, the implicit environments are consulted
    in order of precedence:

    * DATASTORE_DATASET environment variable (for ``gcd`` / emulator testing)
    * GCLOUD_PROJECT environment variable
    * Google App Engine application ID
    * Google Compute Engine project ID (from metadata server)

    :type project: string
    :param project: Optional. The project to use as default.

    :rtype: string or ``NoneType``
    :returns: Default project if it can be determined.
    """
    # Guard clauses: first explicit value wins, then the GCD env var,
    # finally the shared fallback detection.
    if project is not None:
        return project

    inferred = _get_gcd_project()
    if inferred is not None:
        return inferred

    return _base_default_project(project=None)
|
||||
|
||||
|
||||
def _extended_lookup(connection, project, key_pbs,
                     missing=None, deferred=None,
                     eventual=False, transaction_id=None):
    """Repeat lookup until all keys found (unless stop requested).

    Helper function for :meth:`Client.get_multi`.

    :type connection: :class:`gcloud.datastore.connection.Connection`
    :param connection: The connection used to connect to datastore.

    :type project: string
    :param project: The project to make the request for.

    :type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
    :param key_pbs: The keys to retrieve from the datastore.

    :type missing: list
    :param missing: (Optional) If a list is passed, the key-only entity
                    protobufs returned by the backend as "missing" will be
                    copied into it.

    :type deferred: list
    :param deferred: (Optional) If a list is passed, the key protobufs returned
                     by the backend as "deferred" will be copied into it.
                     Note that passing a list disables retries: the first
                     batch of deferred keys is reported, not re-fetched.

    :type eventual: bool
    :param eventual: If False (the default), request ``STRONG`` read
                     consistency. If True, request ``EVENTUAL`` read
                     consistency.

    :type transaction_id: string
    :param transaction_id: If passed, make the request in the scope of
                           the given transaction. Incompatible with
                           ``eventual==True``.

    :rtype: list of :class:`gcloud.datastore._generated.entity_pb2.Entity`
    :returns: The requested entities.
    :raises: :class:`ValueError` if missing / deferred are not null or
             empty list.
    """
    if missing is not None and missing != []:
        raise ValueError('missing must be None or an empty list')

    if deferred is not None and deferred != []:
        raise ValueError('deferred must be None or an empty list')

    results = []

    loop_num = 0
    while loop_num < _MAX_LOOPS:  # loop against possible deferred.
        loop_num += 1

        results_found, missing_found, deferred_found = connection.lookup(
            project=project,
            key_pbs=key_pbs,
            eventual=eventual,
            transaction_id=transaction_id,
        )

        results.extend(results_found)

        if missing is not None:
            missing.extend(missing_found)

        if deferred is not None:
            deferred.extend(deferred_found)
            # Caller asked to see deferred keys: report them and stop
            # after a single attempt rather than retrying.
            break

        if len(deferred_found) == 0:
            break

        # We have deferred keys, and the user didn't ask to know about
        # them, so retry (but only with the deferred ones).
        key_pbs = deferred_found

    return results
|
||||
|
||||
|
||||
class Client(_BaseClient, _ClientProjectMixin):
|
||||
"""Convenience wrapper for invoking APIs/factories w/ a project.
|
||||
|
||||
:type project: string
|
||||
:param project: (optional) The project to pass to proxied API methods.
|
||||
|
||||
:type namespace: string
|
||||
:param namespace: (optional) namespace to pass to proxied API methods.
|
||||
|
||||
:type credentials: :class:`oauth2client.client.OAuth2Credentials` or
|
||||
:class:`NoneType`
|
||||
:param credentials: The OAuth2 Credentials to use for the connection
|
||||
owned by this client. If not passed (and if no ``http``
|
||||
object is passed), falls back to the default inferred
|
||||
from the environment.
|
||||
|
||||
:type http: :class:`httplib2.Http` or class that defines ``request()``.
|
||||
:param http: An optional HTTP object to make requests. If not passed, an
|
||||
``http`` object is created that is bound to the
|
||||
``credentials`` for the current object.
|
||||
"""
|
||||
_connection_class = Connection
|
||||
|
||||
def __init__(self, project=None, namespace=None,
             credentials=None, http=None):
    """Set up project/namespace state, then defer to the base client.

    See the class docstring for parameter descriptions.
    """
    # Resolve/validate the project first (mixin), before the base class
    # builds the connection from credentials/http.
    _ClientProjectMixin.__init__(self, project=project)
    self.namespace = namespace
    # Stack of active batches/transactions (thread-local).
    self._batch_stack = _LocalStack()
    super(Client, self).__init__(credentials, http)
|
||||
|
||||
@staticmethod
def _determine_default(project):
    """Helper: override default project detection.

    Delegates to the datastore-specific resolver, which also honors the
    GCD / emulator environment variable.
    """
    return _determine_default_project(project=project)
|
||||
|
||||
def _push_batch(self, batch):
    """Push a batch/transaction onto our stack.

    "Protected", intended for use by batch / transaction context mgrs.

    :type batch: :class:`gcloud.datastore.batch.Batch`, or an object
                 implementing its API.
    :param batch: newly-active batch/transaction.
    """
    self._batch_stack.push(batch)
|
||||
|
||||
def _pop_batch(self):
    """Pop a batch/transaction from our stack.

    "Protected", intended for use by batch / transaction context mgrs.

    :raises: IndexError if the stack is empty.
    :rtype: :class:`gcloud.datastore.batch.Batch`, or an object
            implementing its API.
    :returns: the top-most batch/transaction, after removing it.
    """
    return self._batch_stack.pop()
|
||||
|
||||
@property
def current_batch(self):
    """Currently-active batch.

    :rtype: :class:`gcloud.datastore.batch.Batch`, or an object
            implementing its API, or ``NoneType`` (if no batch is active).
    :returns: The batch/transaction at the top of the batch stack.
    """
    return self._batch_stack.top
|
||||
|
||||
@property
def current_transaction(self):
    """Currently-active transaction.

    Unlike :attr:`current_batch`, this filters out plain batches: only a
    :class:`gcloud.datastore.transaction.Transaction` instance is returned.

    :rtype: :class:`gcloud.datastore.transaction.Transaction`, or an object
            implementing its API, or ``NoneType`` (if no transaction is
            active).
    :returns: The transaction at the top of the batch stack.
    """
    top = self.current_batch
    return top if isinstance(top, Transaction) else None
|
||||
|
||||
def get(self, key, missing=None, deferred=None, transaction=None):
    """Retrieve a single entity by key, if it exists.

    .. note::

       This is a thin convenience wrapper over :meth:`get_multi`; the
       backend lookup API is identical for one key or many.

    :type key: :class:`gcloud.datastore.key.Key`
    :param key: The key to be retrieved from the datastore.

    :type missing: list
    :param missing: (Optional) If a list is passed, the key-only entities
                    returned by the backend as "missing" will be copied
                    into it.

    :type deferred: list
    :param deferred: (Optional) If a list is passed, the keys returned
                     by the backend as "deferred" will be copied into it.

    :type transaction: :class:`gcloud.datastore.transaction.Transaction`
    :param transaction: (Optional) Transaction to use for read consistency.
                        If not passed, uses current transaction, if set.

    :rtype: :class:`gcloud.datastore.entity.Entity` or ``NoneType``
    :returns: The requested entity if it exists.
    """
    found = self.get_multi(keys=[key], missing=missing,
                           deferred=deferred, transaction=transaction)
    return found[0] if found else None
|
||||
|
||||
def get_multi(self, keys, missing=None, deferred=None, transaction=None):
    """Retrieve entities, along with their attributes.

    :type keys: list of :class:`gcloud.datastore.key.Key`
    :param keys: The keys to be retrieved from the datastore.

    :type missing: list
    :param missing: (Optional) If a list is passed, the key-only entities
                    returned by the backend as "missing" will be copied
                    into it. If the list is not empty, an error will occur.

    :type deferred: list
    :param deferred: (Optional) If a list is passed, the keys returned
                     by the backend as "deferred" will be copied into it.
                     If the list is not empty, an error will occur.

    :type transaction: :class:`gcloud.datastore.transaction.Transaction`
    :param transaction: (Optional) Transaction to use for read consistency.
                        If not passed, uses current transaction, if set.

    :rtype: list of :class:`gcloud.datastore.entity.Entity`
    :returns: The requested entities.
    :raises: :class:`ValueError` if one or more of ``keys`` has a project
             which does not match our project.
    """
    if not keys:
        return []

    # Every key must belong to this client's project.
    if any(key.project != self.project for key in keys):
        raise ValueError('Keys do not match project')

    if transaction is None:
        transaction = self.current_transaction

    entity_pbs = _extended_lookup(
        connection=self.connection,
        project=self.project,
        key_pbs=[key.to_protobuf() for key in keys],
        missing=missing,
        deferred=deferred,
        transaction_id=transaction and transaction.id,
    )

    # ``_extended_lookup`` filled ``missing`` / ``deferred`` with raw
    # protobufs; convert them in place to user-facing objects.
    if missing is not None:
        missing[:] = [helpers.entity_from_protobuf(pb) for pb in missing]

    if deferred is not None:
        deferred[:] = [helpers.key_from_protobuf(pb) for pb in deferred]

    return [helpers.entity_from_protobuf(pb) for pb in entity_pbs]
|
||||
|
||||
def put(self, entity):
    """Save an entity in the Cloud Datastore.

    .. note::

       This is just a thin wrapper over :meth:`put_multi`.
       The backend API does not make a distinction between a single
       entity or multiple entities in a commit request.

    :type entity: :class:`gcloud.datastore.entity.Entity`
    :param entity: The entity to be saved to the datastore.
    """
    self.put_multi(entities=[entity])
|
||||
|
||||
def put_multi(self, entities):
    """Save entities in the Cloud Datastore.

    If a batch/transaction is already active, the entities are added to it;
    otherwise a throwaway batch is created and committed immediately.

    :type entities: list of :class:`gcloud.datastore.entity.Entity`
    :param entities: The entities to be saved to the datastore.

    :raises: :class:`ValueError` if ``entities`` is a single entity.
    """
    if isinstance(entities, Entity):
        raise ValueError("Pass a sequence of entities")

    if not entities:
        return

    batch = self.current_batch
    needs_commit = batch is None
    if needs_commit:
        batch = self.batch()

    for item in entities:
        batch.put(item)

    if needs_commit:
        batch.commit()
|
||||
|
||||
def delete(self, key):
    """Delete a single key from the Cloud Datastore.

    .. note::

        Thin convenience wrapper over :meth:`delete_multi`; the backend
        commit request is identical for one key or many.

    :type key: :class:`gcloud.datastore.key.Key`
    :param key: The key to be deleted from the datastore.
    """
    return self.delete_multi(keys=[key])
|
||||
|
||||
def delete_multi(self, keys):
    """Delete a sequence of keys from the Cloud Datastore.

    If the client has a batch (or transaction) in progress, the deletes
    are added to it; otherwise a one-off batch is created and committed
    here.

    :type keys: list of :class:`gcloud.datastore.key.Key`
    :param keys: The keys to be deleted from the datastore.
    """
    if not keys:
        return

    # Partial keys are allowed through; the backend rejects them at
    # commit time.
    batch = self.current_batch
    implicit_batch = batch is None
    if implicit_batch:
        batch = self.batch()

    for single_key in keys:
        batch.delete(single_key)

    # Only commit a batch we created ourselves; an outer batch commits
    # (or rolls back) on its own schedule.
    if implicit_batch:
        batch.commit()
|
||||
|
||||
def allocate_ids(self, incomplete_key, num_ids):
    """Allocate a list of IDs from a partial key.

    Sends ``num_ids`` copies of the partial key's protobuf to the
    backend and completes the key once per ID the backend assigns.

    :type incomplete_key: :class:`gcloud.datastore.key.Key`
    :param incomplete_key: Partial key to use as base for allocated IDs.

    :type num_ids: int
    :param num_ids: The number of IDs to allocate.

    :rtype: list of :class:`gcloud.datastore.key.Key`
    :returns: The (complete) keys allocated with ``incomplete_key`` as
              root.
    :raises: :class:`ValueError` if ``incomplete_key`` is not a
             partial key.
    """
    if not incomplete_key.is_partial:
        raise ValueError(('Key is not partial.', incomplete_key))

    base_pb = incomplete_key.to_protobuf()
    allocated_key_pbs = self.connection.allocate_ids(
        incomplete_key.project, [base_pb] * num_ids)

    # The assigned numeric ID is the last path element of each returned
    # key protobuf.
    return [incomplete_key.completed_key(key_pb.path[-1].id)
            for key_pb in allocated_key_pbs]
|
||||
|
||||
def key(self, *path_args, **kwargs):
    """Proxy to :class:`gcloud.datastore.key.Key`.

    Always supplies the client's ``project``; supplies the client's
    ``namespace`` unless the caller passed one explicitly.

    :raises: :class:`TypeError` if ``project`` is passed as a keyword
             argument (it is fixed by the client).
    """
    if 'project' in kwargs:
        raise TypeError('Cannot pass project')
    kwargs['project'] = self.project
    kwargs.setdefault('namespace', self.namespace)
    return Key(*path_args, **kwargs)
|
||||
|
||||
def batch(self):
    """Proxy to :class:`gcloud.datastore.batch.Batch`.

    Returns a new batch constructed with this client as its sole
    argument.
    """
    return Batch(self)
|
||||
|
||||
def transaction(self):
    """Proxy to :class:`gcloud.datastore.transaction.Transaction`.

    Returns a new transaction constructed with this client as its sole
    argument.
    """
    return Transaction(self)
|
||||
|
||||
def query(self, **kwargs):
    """Proxy to :class:`gcloud.datastore.query.Query`.

    Always supplies the client (as first positional argument) and the
    client's ``project``; supplies the client's ``namespace`` unless the
    caller passed one explicitly.

    :raises: :class:`TypeError` if ``client`` or ``project`` is passed
             as a keyword argument (both are fixed by the client).
    """
    if 'client' in kwargs:
        raise TypeError('Cannot pass client')
    if 'project' in kwargs:
        raise TypeError('Cannot pass project')
    kwargs['project'] = self.project
    kwargs.setdefault('namespace', self.namespace)
    return Query(self, **kwargs)
|
426
venv/Lib/site-packages/gcloud/datastore/connection.py
Normal file
426
venv/Lib/site-packages/gcloud/datastore/connection.py
Normal file
|
@ -0,0 +1,426 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Connections to gcloud datastore API servers."""
|
||||
|
||||
import os
|
||||
|
||||
from gcloud import connection
|
||||
from gcloud.environment_vars import GCD_HOST
|
||||
from gcloud.exceptions import make_exception
|
||||
from gcloud.datastore._generated import datastore_pb2 as _datastore_pb2
|
||||
from google.rpc import status_pb2
|
||||
|
||||
|
||||
class Connection(connection.Connection):
    """A connection to the Google Cloud Datastore via the Protobuf API.

    This class should understand only the basic types (and protobufs)
    in method arguments, however should be capable of returning advanced
    types.

    :type credentials: :class:`oauth2client.client.OAuth2Credentials`
    :param credentials: The OAuth2 Credentials to use for this connection.

    :type http: :class:`httplib2.Http` or class that defines ``request()``.
    :param http: An optional HTTP object to make requests.

    :type api_base_url: string
    :param api_base_url: The base of the API call URL. Defaults to
                         :attr:`API_BASE_URL`, or to the ``GCD_HOST``
                         environment variable (emulator) when that is set.
    """

    API_BASE_URL = 'https://datastore.googleapis.com'
    """The base of the API call URL."""

    API_VERSION = 'v1beta3'
    """The version of the API, used in building the API call's URL."""

    API_URL_TEMPLATE = ('{api_base}/{api_version}/projects'
                        '/{project}:{method}')
    """A template for the URL of a particular API call."""

    SCOPE = ('https://www.googleapis.com/auth/datastore',)
    """The scopes required for authenticating as a Cloud Datastore consumer."""

    def __init__(self, credentials=None, http=None, api_base_url=None):
        super(Connection, self).__init__(credentials=credentials, http=http)
        if api_base_url is None:
            try:
                # gcd.sh has /datastore/ in the path still since it supports
                # v1beta2 and v1beta3 simultaneously.
                api_base_url = '%s/datastore' % (os.environ[GCD_HOST],)
            except KeyError:
                # No emulator host configured: talk to the production API.
                api_base_url = self.__class__.API_BASE_URL
        self.api_base_url = api_base_url

    def _request(self, project, method, data):
        """Make a request over the Http transport to the Cloud Datastore API.

        :type project: string
        :param project: The project to make the request for.

        :type method: string
        :param method: The API call method name (ie, ``runQuery``,
                       ``lookup``, etc)

        :type data: string
        :param data: The data to send with the API call.
                     Typically this is a serialized Protobuf string.

        :rtype: string
        :returns: The string response content from the API call.
        :raises: :class:`gcloud.exceptions.GCloudError` if the response
                 code is not 200 OK.
        """
        headers = {
            'Content-Type': 'application/x-protobuf',
            'Content-Length': str(len(data)),
            'User-Agent': self.USER_AGENT,
        }
        # NOTE: ``headers`` is rebound here from the request headers to
        # the *response* headers returned by the transport.
        headers, content = self.http.request(
            uri=self.build_api_url(project=project, method=method),
            method='POST', headers=headers, body=data)

        status = headers['status']
        if status != '200':
            # Non-OK responses carry a serialized google.rpc.Status body.
            error_status = status_pb2.Status.FromString(content)
            raise make_exception(headers, error_status.message, use_json=False)

        return content

    def _rpc(self, project, method, request_pb, response_pb_cls):
        """Make a protobuf RPC request.

        Serializes ``request_pb``, POSTs it via :meth:`_request` and
        deserializes the response bytes with ``response_pb_cls``.

        :type project: string
        :param project: The project to connect to. This is
                        usually your project name in the cloud console.

        :type method: string
        :param method: The name of the method to invoke.

        :type request_pb: :class:`google.protobuf.message.Message` instance
        :param request_pb: the protobuf instance representing the request.

        :type response_pb_cls: A :class:`google.protobuf.message.Message`
                               subclass.
        :param response_pb_cls: The class used to unmarshall the response
                                protobuf.
        """
        response = self._request(project=project, method=method,
                                 data=request_pb.SerializeToString())
        return response_pb_cls.FromString(response)

    def build_api_url(self, project, method, base_url=None,
                      api_version=None):
        """Construct the URL for a particular API call.

        This method is used internally to come up with the URL to use when
        making RPCs to the Cloud Datastore API.

        :type project: string
        :param project: The project to connect to. This is
                        usually your project name in the cloud console.

        :type method: string
        :param method: The API method to call (e.g. 'runQuery', 'lookup').

        :type base_url: string
        :param base_url: The base URL where the API lives.
                         You shouldn't have to provide this.

        :type api_version: string
        :param api_version: The version of the API to connect to.
                            You shouldn't have to provide this.
        """
        return self.API_URL_TEMPLATE.format(
            api_base=(base_url or self.api_base_url),
            api_version=(api_version or self.API_VERSION),
            project=project, method=method)

    def lookup(self, project, key_pbs,
               eventual=False, transaction_id=None):
        """Lookup keys from a project in the Cloud Datastore.

        Maps the ``DatastoreService.Lookup`` protobuf RPC.

        This uses mostly protobufs
        (:class:`gcloud.datastore._generated.entity_pb2.Key` as input and
        :class:`gcloud.datastore._generated.entity_pb2.Entity` as output). It
        is used under the hood in
        :meth:`Client.get() <.datastore.client.Client.get>`:

        >>> from gcloud import datastore
        >>> client = datastore.Client(project='project')
        >>> key = client.key('MyKind', 1234)
        >>> client.get(key)
        [<Entity object>]

        Using a :class:`Connection` directly:

        >>> connection.lookup('project', [key.to_protobuf()])
        [<Entity protobuf>]

        :type project: string
        :param project: The project to look up the keys in.

        :type key_pbs: list of
                       :class:`gcloud.datastore._generated.entity_pb2.Key`
        :param key_pbs: The keys to retrieve from the datastore.

        :type eventual: bool
        :param eventual: If False (the default), request ``STRONG`` read
                         consistency. If True, request ``EVENTUAL`` read
                         consistency.

        :type transaction_id: string
        :param transaction_id: If passed, make the request in the scope of
                               the given transaction. Incompatible with
                               ``eventual==True``.

        :rtype: tuple
        :returns: A triple of (``results``, ``missing``, ``deferred``) where
                  both ``results`` and ``missing`` are lists of
                  :class:`gcloud.datastore._generated.entity_pb2.Entity` and
                  ``deferred`` is a list of
                  :class:`gcloud.datastore._generated.entity_pb2.Key`.
        """
        lookup_request = _datastore_pb2.LookupRequest()
        _set_read_options(lookup_request, eventual, transaction_id)
        _add_keys_to_request(lookup_request.keys, key_pbs)

        lookup_response = self._rpc(project, 'lookup', lookup_request,
                                    _datastore_pb2.LookupResponse)

        results = [result.entity for result in lookup_response.found]
        missing = [result.entity for result in lookup_response.missing]

        return results, missing, list(lookup_response.deferred)

    def run_query(self, project, query_pb, namespace=None,
                  eventual=False, transaction_id=None):
        """Run a query on the Cloud Datastore.

        Maps the ``DatastoreService.RunQuery`` protobuf RPC.

        Given a Query protobuf, sends a ``runQuery`` request to the
        Cloud Datastore API and returns a list of entity protobufs
        matching the query.

        You typically wouldn't use this method directly, in favor of the
        :meth:`gcloud.datastore.query.Query.fetch` method.

        Under the hood, the :class:`gcloud.datastore.query.Query` class
        uses this method to fetch data:

        >>> from gcloud import datastore
        >>> client = datastore.Client()
        >>> query = client.query(kind='MyKind')
        >>> query.add_filter('property', '=', 'val')

        Using the query iterator's
        :meth:`next_page() <.datastore.query.Iterator.next_page>` method:

        >>> query_iter = query.fetch()
        >>> entities, more_results, cursor = query_iter.next_page()
        >>> entities
        [<list of Entity unmarshalled from protobuf>]
        >>> more_results
        <boolean of more results>
        >>> cursor
        <string containing cursor where fetch stopped>

        Under the hood this is doing:

        >>> connection.run_query('project', query.to_protobuf())
        [<list of Entity Protobufs>], cursor, more_results, skipped_results

        :type project: string
        :param project: The project over which to run the query.

        :type query_pb: :class:`gcloud.datastore._generated.query_pb2.Query`
        :param query_pb: The Protobuf representing the query to run.

        :type namespace: string
        :param namespace: The namespace over which to run the query.

        :type eventual: bool
        :param eventual: If False (the default), request ``STRONG`` read
                         consistency. If True, request ``EVENTUAL`` read
                         consistency.

        :type transaction_id: string
        :param transaction_id: If passed, make the request in the scope of
                               the given transaction. Incompatible with
                               ``eventual==True``.

        :rtype: tuple
        :returns: Four-tuple containing the entities returned,
                  the end cursor of the query, a ``more_results``
                  enum and a count of the number of skipped results.
        """
        request = _datastore_pb2.RunQueryRequest()
        _set_read_options(request, eventual, transaction_id)

        if namespace:
            request.partition_id.namespace_id = namespace

        request.query.CopyFrom(query_pb)
        response = self._rpc(project, 'runQuery', request,
                             _datastore_pb2.RunQueryResponse)
        return (
            [e.entity for e in response.batch.entity_results],
            response.batch.end_cursor,  # Assume response always has cursor.
            response.batch.more_results,
            response.batch.skipped_results,
        )

    def begin_transaction(self, project):
        """Begin a transaction.

        Maps the ``DatastoreService.BeginTransaction`` protobuf RPC.

        :type project: string
        :param project: The project to which the transaction applies.

        :rtype: bytes
        :returns: The serialized transaction that was begun.
        """
        request = _datastore_pb2.BeginTransactionRequest()
        response = self._rpc(project, 'beginTransaction', request,
                             _datastore_pb2.BeginTransactionResponse)
        return response.transaction

    def commit(self, project, request, transaction_id):
        """Commit mutations in context of current transaction (if any).

        Maps the ``DatastoreService.Commit`` protobuf RPC.

        :type project: string
        :param project: The project to which the transaction applies.

        :type request: :class:`._generated.datastore_pb2.CommitRequest`
        :param request: The protobuf with the mutations being committed.

        :type transaction_id: string or None
        :param transaction_id: The transaction ID returned from
                               :meth:`begin_transaction`. Non-transactional
                               batches must pass ``None``.

        .. note::

            This method will mutate ``request`` before using it
            (its ``mode`` and possibly ``transaction`` fields are set).

        :rtype: tuple
        :returns: The pair of the number of index updates and a list of
                  :class:`._generated.entity_pb2.Key` for each incomplete key
                  that was completed in the commit.
        """
        if transaction_id:
            request.mode = _datastore_pb2.CommitRequest.TRANSACTIONAL
            request.transaction = transaction_id
        else:
            request.mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL

        response = self._rpc(project, 'commit', request,
                             _datastore_pb2.CommitResponse)
        return _parse_commit_response(response)

    def rollback(self, project, transaction_id):
        """Rollback the connection's existing transaction.

        Maps the ``DatastoreService.Rollback`` protobuf RPC.

        :type project: string
        :param project: The project to which the transaction belongs.

        :type transaction_id: string
        :param transaction_id: The transaction ID returned from
                               :meth:`begin_transaction`.
        """
        request = _datastore_pb2.RollbackRequest()
        request.transaction = transaction_id
        # Nothing to do with this response, so just execute the method.
        self._rpc(project, 'rollback', request,
                  _datastore_pb2.RollbackResponse)

    def allocate_ids(self, project, key_pbs):
        """Obtain backend-generated IDs for a set of keys.

        Maps the ``DatastoreService.AllocateIds`` protobuf RPC.

        :type project: string
        :param project: The project to which the transaction belongs.

        :type key_pbs: list of
                       :class:`gcloud.datastore._generated.entity_pb2.Key`
        :param key_pbs: The keys for which the backend should allocate IDs.

        :rtype: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
        :returns: An equal number of keys, with IDs filled in by the backend.
        """
        request = _datastore_pb2.AllocateIdsRequest()
        _add_keys_to_request(request.keys, key_pbs)
        response = self._rpc(project, 'allocateIds', request,
                             _datastore_pb2.AllocateIdsResponse)
        return list(response.keys)
|
||||
|
||||
|
||||
def _set_read_options(request, eventual, transaction_id):
|
||||
"""Validate rules for read options, and assign to the request.
|
||||
|
||||
Helper method for ``lookup()`` and ``run_query``.
|
||||
|
||||
:raises: :class:`ValueError` if ``eventual`` is ``True`` and the
|
||||
``transaction_id`` is not ``None``.
|
||||
"""
|
||||
if eventual and (transaction_id is not None):
|
||||
raise ValueError('eventual must be False when in a transaction')
|
||||
|
||||
opts = request.read_options
|
||||
if eventual:
|
||||
opts.read_consistency = _datastore_pb2.ReadOptions.EVENTUAL
|
||||
elif transaction_id:
|
||||
opts.transaction = transaction_id
|
||||
|
||||
|
||||
def _add_keys_to_request(request_field_pb, key_pbs):
|
||||
"""Add protobuf keys to a request object.
|
||||
|
||||
:type request_field_pb: `RepeatedCompositeFieldContainer`
|
||||
:param request_field_pb: A repeated proto field that contains keys.
|
||||
|
||||
:type key_pbs: list of :class:`gcloud.datastore._generated.entity_pb2.Key`
|
||||
:param key_pbs: The keys to add to a request.
|
||||
"""
|
||||
for key_pb in key_pbs:
|
||||
request_field_pb.add().CopyFrom(key_pb)
|
||||
|
||||
|
||||
def _parse_commit_response(commit_response_pb):
|
||||
"""Extract response data from a commit response.
|
||||
|
||||
:type commit_response_pb: :class:`._generated.datastore_pb2.CommitResponse`
|
||||
:param commit_response_pb: The protobuf response from a commit request.
|
||||
|
||||
:rtype: tuple
|
||||
:returns': The pair of the number of index updates and a list of
|
||||
:class:`._generated.entity_pb2.Key` for each incomplete key
|
||||
that was completed in the commit.
|
||||
"""
|
||||
mut_results = commit_response_pb.mutation_results
|
||||
index_updates = commit_response_pb.index_updates
|
||||
completed_keys = [mut_result.key for mut_result in mut_results
|
||||
if mut_result.HasField('key')] # Message field (Key)
|
||||
return index_updates, completed_keys
|
142
venv/Lib/site-packages/gcloud/datastore/entity.py
Normal file
142
venv/Lib/site-packages/gcloud/datastore/entity.py
Normal file
|
@ -0,0 +1,142 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Class for representing a single entity in the Cloud Datastore."""
|
||||
|
||||
|
||||
from gcloud._helpers import _ensure_tuple_or_list
|
||||
|
||||
|
||||
class Entity(dict):
    """A single Cloud Datastore record, behaving like a dictionary.

    An entity couples a :class:`gcloud.datastore.key.Key` with a mapping
    of property names to values.  The key may be partial (a kind, and
    possibly a parent, but no ID); the datastore service assigns an ID
    when such an entity is saved.

    Because :class:`Entity` subclasses :class:`dict`, properties are
    read and written with ordinary item access, and ``dict(entity)``
    yields a plain dictionary of the properties.  Entities are mutable,
    so an existing entity can be re-keyed to duplicate the stored data.

    Use :meth:`gcloud.datastore.client.Client.get` to retrieve an
    existing entity:

    >>> from gcloud import datastore
    >>> client = datastore.Client()
    >>> client.get(key)
    <Entity[{'kind': 'EntityKind', id: 1234}] {'property': 'value'}>

    Set values just like on any other dictionary:

    >>> entity['age'] = 20
    >>> entity['name'] = 'JJ'
    >>> dict(entity)
    {'age': 20, 'name': 'JJ'}

    .. note::

        When saved, text values (``unicode`` in Python 2, ``str`` in
        Python 3) are stored in the 'text_value' field after UTF-8
        encoding, and decoded back to text on retrieval.  Byte values
        (``str`` in Python 2, ``bytes`` in Python 3) round-trip through
        the 'blob_value' field without any encoding / decoding step.

    :type key: :class:`gcloud.datastore.key.Key`
    :param key: Optional key to be set on entity.

    :type exclude_from_indexes: tuple of string
    :param exclude_from_indexes: Names of fields whose values are not to be
                                 indexed for this entity.
    """

    def __init__(self, key=None, exclude_from_indexes=()):
        super(Entity, self).__init__()
        self.key = key
        self._exclude_from_indexes = set(_ensure_tuple_or_list(
            'exclude_from_indexes', exclude_from_indexes))
        # Populated by gcloud.datastore.helpers.entity_from_protobuf
        # when an entity is unmarshalled from the API.
        self._meanings = {}

    def __eq__(self, other):
        """Compare two entities for equality.

        Entities are equal when their keys, index exclusions, meanings
        and properties all match.

        :rtype: boolean
        :returns: True if the entities compare equal, else False.
        """
        if not isinstance(other, Entity):
            return False

        same_key = self.key == other.key
        same_excluded = (self._exclude_from_indexes ==
                         other._exclude_from_indexes)
        same_meanings = self._meanings == other._meanings
        same_props = super(Entity, self).__eq__(other)
        return same_key and same_excluded and same_meanings and same_props

    def __ne__(self, other):
        """Compare two entities for inequality.

        :rtype: boolean
        :returns: False if the entities compare equal, else True.
        """
        return not self.__eq__(other)

    @property
    def kind(self):
        """Kind of the entity, taken from its key.

        .. note::
            The kind lives entirely on the entity's
            :class:`gcloud.datastore.key.Key`; the entity stores only
            the properties plus a pointer to a key which knows its kind.
            ``None`` when the entity has no key.
        """
        return self.key.kind if self.key else None

    @property
    def exclude_from_indexes(self):
        """Names of fields which are *not* to be indexed for this entity.

        :rtype: sequence of field names
        """
        return frozenset(self._exclude_from_indexes)

    def __repr__(self):
        props = super(Entity, self).__repr__()
        if self.key:
            return '<Entity%s %s>' % (self.key.path, props)
        else:
            return '<Entity %s>' % (props,)
|
468
venv/Lib/site-packages/gcloud/datastore/helpers.py
Normal file
468
venv/Lib/site-packages/gcloud/datastore/helpers.py
Normal file
|
@ -0,0 +1,468 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Helper functions for dealing with Cloud Datastore's Protobuf API.
|
||||
|
||||
The non-private functions are part of the API.
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import itertools
|
||||
|
||||
from google.protobuf import struct_pb2
|
||||
from google.type import latlng_pb2
|
||||
import six
|
||||
|
||||
from gcloud._helpers import _datetime_to_pb_timestamp
|
||||
from gcloud._helpers import _pb_timestamp_to_datetime
|
||||
from gcloud.datastore._generated import entity_pb2 as _entity_pb2
|
||||
from gcloud.datastore.entity import Entity
|
||||
from gcloud.datastore.key import Key
|
||||
|
||||
__all__ = ('entity_from_protobuf', 'key_from_protobuf')
|
||||
|
||||
|
||||
def _get_meaning(value_pb, is_list=False):
|
||||
"""Get the meaning from a protobuf value.
|
||||
|
||||
:type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value`
|
||||
:param value_pb: The protobuf value to be checked for an
|
||||
associated meaning.
|
||||
|
||||
:type is_list: bool
|
||||
:param is_list: Boolean indicating if the ``value_pb`` contains
|
||||
a list value.
|
||||
|
||||
:rtype: int
|
||||
:returns: The meaning for the ``value_pb`` if one is set, else
|
||||
:data:`None`. For a list value, if there are disagreeing
|
||||
means it just returns a list of meanings. If all the
|
||||
list meanings agree, it just condenses them.
|
||||
"""
|
||||
meaning = None
|
||||
if is_list:
|
||||
# An empty list will have no values, hence no shared meaning
|
||||
# set among them.
|
||||
if len(value_pb.array_value.values) == 0:
|
||||
return None
|
||||
|
||||
# We check among all the meanings, some of which may be None,
|
||||
# the rest which may be enum/int values.
|
||||
all_meanings = [_get_meaning(sub_value_pb)
|
||||
for sub_value_pb in value_pb.array_value.values]
|
||||
unique_meanings = set(all_meanings)
|
||||
if len(unique_meanings) == 1:
|
||||
# If there is a unique meaning, we preserve it.
|
||||
meaning = unique_meanings.pop()
|
||||
else: # We know len(value_pb.array_value.values) > 0.
|
||||
# If the meaning is not unique, just return all of them.
|
||||
meaning = all_meanings
|
||||
elif value_pb.meaning: # Simple field (int32)
|
||||
meaning = value_pb.meaning
|
||||
|
||||
return meaning
|
||||
|
||||
|
||||
def _new_value_pb(entity_pb, name):
    """Add (by name) a new ``Value`` protobuf to an entity protobuf.

    Uses the proto map-field ``get_or_create`` accessor, so an existing
    property of the same name is returned instead of duplicated.

    :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
    :param entity_pb: An entity protobuf to add a new property to.

    :type name: string
    :param name: The name of the new property.

    :rtype: :class:`gcloud.datastore._generated.entity_pb2.Value`
    :returns: The new ``Value`` protobuf that was added to the entity.
    """
    return entity_pb.properties.get_or_create(name)
|
||||
|
||||
|
||||
def _property_tuples(entity_pb):
    """Iterator of name, ``Value`` tuples from entity properties.

    Uses ``six.iteritems`` so the iteration is lazy on both Python 2
    and Python 3.

    :type entity_pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
    :param entity_pb: The entity protobuf whose properties are iterated.

    :rtype: :class:`generator`
    :returns: An iterator that yields tuples of a name and ``Value``
              corresponding to properties on the entity.
    """
    return six.iteritems(entity_pb.properties)
|
||||
|
||||
|
||||
def entity_from_protobuf(pb):
    """Factory method for creating an entity based on a protobuf.

    The protobuf should be one returned from the Cloud Datastore
    Protobuf API.

    Besides the properties themselves, this also reconstructs the
    entity's key (when present), per-property "meaning" metadata, and
    the set of properties excluded from indexing.

    :type pb: :class:`gcloud.datastore._generated.entity_pb2.Entity`
    :param pb: The Protobuf representing the entity.

    :rtype: :class:`gcloud.datastore.entity.Entity`
    :returns: The entity derived from the protobuf.
    :raises: :class:`ValueError` if the sub-values of an array property
             do not agree on being indexed vs. excluded.
    """
    key = None
    if pb.HasField('key'):  # Message field (Key)
        key = key_from_protobuf(pb.key)

    entity_props = {}
    entity_meanings = {}
    exclude_from_indexes = []

    for prop_name, value_pb in _property_tuples(pb):
        value = _get_value_from_value_pb(value_pb)
        entity_props[prop_name] = value

        # Check if the property has an associated meaning.
        is_list = isinstance(value, list)
        meaning = _get_meaning(value_pb, is_list=is_list)
        if meaning is not None:
            # Keep the original value alongside the meaning so it can be
            # restored later only if the value is unchanged.
            entity_meanings[prop_name] = (meaning, value)

        # Check if ``value_pb`` was excluded from index. Lists need to be
        # special-cased and we require all ``exclude_from_indexes`` values
        # in a list agree.
        if is_list:
            # NOTE: the ``value_pb`` inside this generator expression
            # shadows the loop variable, but only within the expression's
            # own scope.
            exclude_values = set(value_pb.exclude_from_indexes
                                 for value_pb in value_pb.array_value.values)
            if len(exclude_values) != 1:
                raise ValueError('For an array_value, subvalues must either '
                                 'all be indexed or all excluded from '
                                 'indexes.')

            if exclude_values.pop():
                exclude_from_indexes.append(prop_name)
        else:
            if value_pb.exclude_from_indexes:
                exclude_from_indexes.append(prop_name)

    entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes)
    entity.update(entity_props)
    entity._meanings.update(entity_meanings)
    return entity
|
||||
|
||||
|
||||
def _set_pb_meaning_from_entity(entity, name, value, value_pb,
                                is_list=False):
    """Copy meaning information from an entity back onto a protobuf value.

    :type entity: :class:`gcloud.datastore.entity.Entity`
    :param entity: The entity to be turned into a protobuf.

    :type name: string
    :param name: The name of the property.

    :type value: object
    :param value: The current value stored as property ``name``.

    :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value`
    :param value_pb: The protobuf value to add meaning / meanings to.

    :type is_list: bool
    :param is_list: (Optional) Boolean indicating if the ``value`` is
                    a list value.
    """
    try:
        meaning, original_value = entity._meanings[name]
    except KeyError:
        return

    # The meaning is only restored when the value is unchanged from the
    # one originally read from the API.
    if original_value is not value:
        return

    if not is_list:
        value_pb.meaning = meaning
        return

    # For lists, meaning is applied per sub-element. A scalar meaning is
    # broadcast over every element via ``itertools.repeat``.
    if not isinstance(meaning, list):
        meaning = itertools.repeat(meaning)
    pairs = six.moves.zip(value_pb.array_value.values, meaning)
    for sub_pb, sub_meaning in pairs:
        if sub_meaning is not None:
            sub_pb.meaning = sub_meaning
|
||||
|
||||
|
||||
def entity_to_protobuf(entity):
    """Convert an entity into a protobuf.

    :type entity: :class:`gcloud.datastore.entity.Entity`
    :param entity: The entity to be turned into a protobuf.

    :rtype: :class:`gcloud.datastore._generated.entity_pb2.Entity`
    :returns: The protobuf representing the entity.
    """
    entity_pb = _entity_pb2.Entity()
    if entity.key is not None:
        entity_pb.key.CopyFrom(entity.key.to_protobuf())

    for name, value in entity.items():
        is_list = isinstance(value, list)
        # Empty lists are skipped entirely: there is nothing to encode.
        if is_list and len(value) == 0:
            continue

        value_pb = _new_value_pb(entity_pb, name)
        _set_protobuf_value(value_pb, value)

        # Propagate index-exclusion. For a list value, the flag lives on
        # each sub-value; for a scalar, on the value itself. (The loop
        # below is a no-op for scalars, whose array_value is empty.)
        if name in entity.exclude_from_indexes:
            if not is_list:
                value_pb.exclude_from_indexes = True
            for sub_pb in value_pb.array_value.values:
                sub_pb.exclude_from_indexes = True

        # Restore any meaning captured when the entity was read.
        _set_pb_meaning_from_entity(entity, name, value, value_pb,
                                    is_list=is_list)

    return entity_pb
|
||||
|
||||
|
||||
def key_from_protobuf(pb):
    """Factory method for creating a key based on a protobuf.

    The protobuf should be one returned from the Cloud Datastore
    Protobuf API.

    :type pb: :class:`gcloud.datastore._generated.entity_pb2.Key`
    :param pb: The Protobuf representing the key.

    :rtype: :class:`gcloud.datastore.key.Key`
    :returns: a new `Key` instance
    """
    flat = []
    for element in pb.path:
        flat.append(element.kind)
        if element.id:  # Simple field (int64)
            flat.append(element.id)
        # This is safe: we expect proto objects returned will only have
        # one of `name` or `id` set.
        if element.name:  # Simple field (string)
            flat.append(element.name)

    # Empty proto strings are normalized to ``None``.
    partition = pb.partition_id
    project = partition.project_id or None
    namespace = partition.namespace_id or None

    return Key(*flat, namespace=namespace, project=project)
|
||||
|
||||
|
||||
def _pb_attr_value(val):
    """Given a value, return the protobuf attribute name and proper value.

    The Protobuf API uses different attribute names based on value types
    rather than inferring the type. This function simply determines the
    proper attribute name based on the type of the value provided and
    returns the attribute name as well as a properly formatted value.

    Certain value types need to be coerced into a different type (such
    as a `datetime.datetime` into an integer timestamp, or a
    `gcloud.datastore.key.Key` into a Protobuf representation). This
    function handles that for you.

    .. note::
       Values which are "text" ('unicode' in Python2, 'str' in Python3) map
       to 'string_value' in the datastore; values which are "bytes"
       ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

    For example:

    >>> _pb_attr_value(1234)
    ('integer_value', 1234)
    >>> _pb_attr_value('my_string')
    ('string_value', 'my_string')

    :type val: `datetime.datetime`, :class:`gcloud.datastore.key.Key`,
               bool, float, integer, string
    :param val: The value to be scrutinized.

    :returns: A tuple of the attribute name and proper value type.
    :raises: :class:`ValueError` if ``val`` is of an unsupported type.
    """

    if isinstance(val, datetime.datetime):
        name = 'timestamp'
        value = _datetime_to_pb_timestamp(val)
    elif isinstance(val, Key):
        name, value = 'key', val.to_protobuf()
    elif isinstance(val, bool):
        # ``bool`` must be tested before the integer check below, since
        # ``bool`` is a subclass of ``int`` and would otherwise be
        # classified as 'integer'.
        name, value = 'boolean', val
    elif isinstance(val, float):
        name, value = 'double', val
    elif isinstance(val, six.integer_types):
        name, value = 'integer', val
    elif isinstance(val, six.text_type):
        # Text must be tested before the bytes check below: on Python 3,
        # ``str`` would otherwise match ``(bytes, str)`` and be mapped
        # to 'blob'.
        name, value = 'string', val
    elif isinstance(val, (bytes, str)):
        name, value = 'blob', val
    elif isinstance(val, Entity):
        name, value = 'entity', val
    elif isinstance(val, list):
        name, value = 'array', val
    elif isinstance(val, GeoPoint):
        name, value = 'geo_point', val.to_protobuf()
    elif val is None:
        name, value = 'null', struct_pb2.NULL_VALUE
    else:
        raise ValueError("Unknown protobuf attr type %s" % type(val))

    # Attribute names in the protobuf all carry a '_value' suffix.
    return name + '_value', value
|
||||
|
||||
|
||||
def _get_value_from_value_pb(value_pb):
    """Given a protobuf for a Value, get the correct value.

    The Cloud Datastore Protobuf API returns a Property Protobuf which
    has one value set and the rest blank. This function retrieves the
    one value provided.

    Some work is done to coerce the return value into a more useful type
    (particularly in the case of a timestamp value, or a key value).

    :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value`
    :param value_pb: The Value Protobuf.

    :returns: The value provided by the Protobuf.
    :raises: :class:`ValueError <exceptions.ValueError>` if no value type
             has been set.
    """
    # ``WhichOneof`` tells us which member of the ``value_type`` oneof
    # was populated.
    value_type = value_pb.WhichOneof('value_type')

    if value_type == 'timestamp_value':
        return _pb_timestamp_to_datetime(value_pb.timestamp_value)

    if value_type == 'key_value':
        return key_from_protobuf(value_pb.key_value)

    if value_type == 'boolean_value':
        return value_pb.boolean_value

    if value_type == 'double_value':
        return value_pb.double_value

    if value_type == 'integer_value':
        return value_pb.integer_value

    if value_type == 'string_value':
        return value_pb.string_value

    if value_type == 'blob_value':
        return value_pb.blob_value

    if value_type == 'entity_value':
        return entity_from_protobuf(value_pb.entity_value)

    if value_type == 'array_value':
        # Recurse on each sub-value of the array.
        return [_get_value_from_value_pb(sub_pb)
                for sub_pb in value_pb.array_value.values]

    if value_type == 'geo_point_value':
        return GeoPoint(value_pb.geo_point_value.latitude,
                        value_pb.geo_point_value.longitude)

    if value_type == 'null_value':
        return None

    raise ValueError('Value protobuf did not have any value set')
|
||||
|
||||
|
||||
def _set_protobuf_value(value_pb, val):
    """Assign ``val`` to the correct subfield of ``value_pb``.

    The Protobuf API uses different attribute names based on value types
    rather than inferring the type.

    Some value types (entities, keys, lists) cannot be directly
    assigned; this function handles them correctly.

    :type value_pb: :class:`gcloud.datastore._generated.entity_pb2.Value`
    :param value_pb: The value protobuf to which the value is being assigned.

    :type val: :class:`datetime.datetime`, boolean, float, integer, string,
               :class:`gcloud.datastore.key.Key`,
               :class:`gcloud.datastore.entity.Entity`
    :param val: The value to be assigned.
    """
    attr, val = _pb_attr_value(val)
    if attr == 'key_value':
        value_pb.key_value.CopyFrom(val)
    elif attr == 'timestamp_value':
        value_pb.timestamp_value.CopyFrom(val)
    elif attr == 'entity_value':
        # Nested entities are converted to a protobuf before assignment.
        value_pb.entity_value.CopyFrom(entity_to_protobuf(val))
    elif attr == 'array_value':
        # Each list element becomes its own Value message, assigned
        # recursively (elements may themselves be entities, keys, ...).
        array = value_pb.array_value.values
        for element in val:
            _set_protobuf_value(array.add(), element)
    elif attr == 'geo_point_value':
        value_pb.geo_point_value.CopyFrom(val)
    else:  # Scalar protobuf field: plain assignment suffices.
        setattr(value_pb, attr, val)
|
||||
|
||||
|
||||
class GeoPoint(object):
    """Simple container for a geo point value.

    :type latitude: float
    :param latitude: Latitude of a point.

    :type longitude: float
    :param longitude: Longitude of a point.
    """

    def __init__(self, latitude, longitude):
        self.latitude = latitude
        self.longitude = longitude

    def to_protobuf(self):
        """Convert the current object to protobuf.

        :rtype: :class:`google.type.latlng_pb2.LatLng`.
        :returns: The current point as a protobuf.
        """
        return latlng_pb2.LatLng(latitude=self.latitude,
                                 longitude=self.longitude)

    def __eq__(self, other):
        """Compare two geo points for equality.

        :rtype: boolean
        :returns: True if the points compare equal, else False.
        """
        if not isinstance(other, GeoPoint):
            return False
        # Equal iff both coordinates match.
        return ((self.latitude, self.longitude) ==
                (other.latitude, other.longitude))

    def __ne__(self, other):
        """Compare two geo points for inequality.

        :rtype: boolean
        :returns: False if the points compare equal, else True.
        """
        return not self.__eq__(other)
|
404
venv/Lib/site-packages/gcloud/datastore/key.py
Normal file
404
venv/Lib/site-packages/gcloud/datastore/key.py
Normal file
|
@ -0,0 +1,404 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Create / interact with gcloud datastore keys."""
|
||||
|
||||
import copy
|
||||
import six
|
||||
|
||||
from gcloud.datastore._generated import entity_pb2 as _entity_pb2
|
||||
|
||||
|
||||
class Key(object):
    """An immutable representation of a datastore Key.

    To create a basic key:

    >>> Key('EntityKind', 1234)
    <Key[{'kind': 'EntityKind', 'id': 1234}]>
    >>> Key('EntityKind', 'foo')
    <Key[{'kind': 'EntityKind', 'name': 'foo'}]>

    To create a key with a parent:

    >>> Key('Parent', 'foo', 'Child', 1234)
    <Key[{'kind': 'Parent', 'name': 'foo'}, {'kind': 'Child', 'id': 1234}]>
    >>> Key('Child', 1234, parent=parent_key)
    <Key[{'kind': 'Parent', 'name': 'foo'}, {'kind': 'Child', 'id': 1234}]>

    To create a partial key:

    >>> Key('Parent', 'foo', 'Child')
    <Key[{'kind': 'Parent', 'name': 'foo'}, {'kind': 'Child'}]>

    :type path_args: tuple of string and integer
    :param path_args: May represent a partial (odd length) or full (even
                      length) key path.

    :type kwargs: dict
    :param kwargs: Keyword arguments to be passed in.

    Accepted keyword arguments are

    * namespace (string): A namespace identifier for the key.
    * project (string): The project associated with the key.
    * parent (:class:`gcloud.datastore.key.Key`): The parent of the key.

    The project argument is required unless it has been set implicitly.
    """

    def __init__(self, *path_args, **kwargs):
        self._flat_path = path_args
        parent = self._parent = kwargs.get('parent')
        self._namespace = kwargs.get('namespace')
        project = kwargs.get('project')
        self._project = _validate_project(project, parent)
        # _flat_path, _parent, _namespace and _project must be set before
        # _combine_args() is called.
        self._path = self._combine_args()

    def __eq__(self, other):
        """Compare two keys for equality.

        Incomplete keys never compare equal to any other key.

        Completed keys compare equal if they have the same path, project,
        and namespace.

        :rtype: bool
        :returns: True if the keys compare equal, else False.
        """
        if not isinstance(other, Key):
            return False

        # A partial key is, by definition, not yet a specific entity.
        if self.is_partial or other.is_partial:
            return False

        return (self.flat_path == other.flat_path and
                self.project == other.project and
                self.namespace == other.namespace)

    def __ne__(self, other):
        """Compare two keys for inequality.

        Incomplete keys never compare equal to any other key.

        Completed keys compare equal if they have the same path, project,
        and namespace.

        :rtype: bool
        :returns: False if the keys compare equal, else True.
        """
        return not self.__eq__(other)

    def __hash__(self):
        """Hash the key for use in a dictionary lookup.

        :rtype: integer
        :returns: a hash of the key's state.
        """
        # NOTE(review): summing the component hashes collides more often
        # than hashing the tuple of components would, and partial keys are
        # hashable even though they never compare equal -- confirm intended.
        return (hash(self.flat_path) +
                hash(self.project) +
                hash(self.namespace))

    @staticmethod
    def _parse_path(path_args):
        """Parses positional arguments into key path with kinds and IDs.

        :type path_args: tuple
        :param path_args: A tuple from positional arguments. Should be
                          alternating list of kinds (string) and ID/name
                          parts (int or string).

        :rtype: :class:`list` of :class:`dict`
        :returns: A list of key parts with kind and ID or name set.
        :raises: :class:`ValueError` if there are no ``path_args``, if one of
                 the kinds is not a string or if one of the IDs/names is not
                 a string or an integer.
        """
        if len(path_args) == 0:
            raise ValueError('Key path must not be empty.')

        # Even positions are kinds, odd positions are IDs/names.
        kind_list = path_args[::2]
        id_or_name_list = path_args[1::2]
        # Dummy sentinel value to pad incomplete key to even length path.
        partial_ending = object()
        if len(path_args) % 2 == 1:
            id_or_name_list += (partial_ending,)

        result = []
        for kind, id_or_name in zip(kind_list, id_or_name_list):
            curr_key_part = {}
            if isinstance(kind, six.string_types):
                curr_key_part['kind'] = kind
            else:
                raise ValueError(kind, 'Kind was not a string.')

            if isinstance(id_or_name, six.string_types):
                curr_key_part['name'] = id_or_name
            elif isinstance(id_or_name, six.integer_types):
                curr_key_part['id'] = id_or_name
            elif id_or_name is not partial_ending:
                raise ValueError(id_or_name,
                                 'ID/name was not a string or integer.')

            result.append(curr_key_part)

        return result

    def _combine_args(self):
        """Sets protected data by combining raw data set from the constructor.

        If a ``_parent`` is set, updates the ``_flat_path`` and sets the
        ``_namespace`` and ``_project`` if not already set.

        :rtype: :class:`list` of :class:`dict`
        :returns: A list of key parts with kind and ID or name set.
        :raises: :class:`ValueError` if the parent key is not complete.
        """
        child_path = self._parse_path(self._flat_path)

        if self._parent is not None:
            if self._parent.is_partial:
                raise ValueError('Parent key must be complete.')

            # We know that _parent.path() will return a copy.
            child_path = self._parent.path + child_path
            self._flat_path = self._parent.flat_path + self._flat_path
            # Namespace and project are inherited from the parent; an
            # explicitly-passed value must agree with the parent's.
            if (self._namespace is not None and
                    self._namespace != self._parent.namespace):
                raise ValueError('Child namespace must agree with parent\'s.')
            self._namespace = self._parent.namespace
            if (self._project is not None and
                    self._project != self._parent.project):
                raise ValueError('Child project must agree with parent\'s.')
            self._project = self._parent.project

        return child_path

    def _clone(self):
        """Duplicates the Key.

        Most attributes are simple types, so don't require copying. Other
        attributes like ``parent`` are long-lived and so we re-use them.

        :rtype: :class:`gcloud.datastore.key.Key`
        :returns: A new ``Key`` instance with the same data as the current one.
        """
        cloned_self = self.__class__(*self.flat_path,
                                     project=self.project,
                                     namespace=self.namespace)
        # If the current parent has already been set, we re-use
        # the same instance
        cloned_self._parent = self._parent
        return cloned_self

    def completed_key(self, id_or_name):
        """Creates new key from existing partial key by adding final ID/name.

        :type id_or_name: string or integer
        :param id_or_name: ID or name to be added to the key.

        :rtype: :class:`gcloud.datastore.key.Key`
        :returns: A new ``Key`` instance with the same data as the current one
                  and an extra ID or name added.
        :raises: :class:`ValueError` if the current key is not partial or if
                 ``id_or_name`` is not a string or integer.
        """
        if not self.is_partial:
            raise ValueError('Only a partial key can be completed.')

        id_or_name_key = None
        if isinstance(id_or_name, six.string_types):
            id_or_name_key = 'name'
        elif isinstance(id_or_name, six.integer_types):
            id_or_name_key = 'id'
        else:
            raise ValueError(id_or_name,
                             'ID/name was not a string or integer.')

        # Mutate the clone, not ``self``: keys are meant to be immutable.
        new_key = self._clone()
        new_key._path[-1][id_or_name_key] = id_or_name
        new_key._flat_path += (id_or_name,)
        return new_key

    def to_protobuf(self):
        """Return a protobuf corresponding to the key.

        :rtype: :class:`gcloud.datastore._generated.entity_pb2.Key`
        :returns: The protobuf representing the key.
        """
        key = _entity_pb2.Key()
        key.partition_id.project_id = self.project

        if self.namespace:
            key.partition_id.namespace_id = self.namespace

        for item in self.path:
            element = key.path.add()
            if 'kind' in item:
                element.kind = item['kind']
            if 'id' in item:
                element.id = item['id']
            if 'name' in item:
                element.name = item['name']

        return key

    @property
    def is_partial(self):
        """Boolean indicating if the key has an ID (or name).

        :rtype: bool
        :returns: ``True`` if the last element of the key's path does not have
                  an ``id`` or a ``name``.
        """
        return self.id_or_name is None

    @property
    def namespace(self):
        """Namespace getter.

        :rtype: string
        :returns: The namespace of the current key.
        """
        return self._namespace

    @property
    def path(self):
        """Path getter.

        Returns a copy so that the key remains immutable.

        :rtype: :class:`list` of :class:`dict`
        :returns: The (key) path of the current key.
        """
        return copy.deepcopy(self._path)

    @property
    def flat_path(self):
        """Getter for the key path as a tuple.

        :rtype: tuple of string and integer
        :returns: The tuple of elements in the path.
        """
        return self._flat_path

    @property
    def kind(self):
        """Kind getter. Based on the last element of path.

        :rtype: string
        :returns: The kind of the current key.
        """
        return self.path[-1]['kind']

    @property
    def id(self):
        """ID getter. Based on the last element of path.

        :rtype: integer
        :returns: The (integer) ID of the key, or ``None`` if not set.
        """
        return self.path[-1].get('id')

    @property
    def name(self):
        """Name getter. Based on the last element of path.

        :rtype: string
        :returns: The (string) name of the key, or ``None`` if not set.
        """
        return self.path[-1].get('name')

    @property
    def id_or_name(self):
        """Getter. Based on the last element of path.

        :rtype: integer (if ``id``) or string (if ``name``)
        :returns: The last element of the key's path if it is either an ``id``
                  or a ``name``.
        """
        return self.id or self.name

    @property
    def project(self):
        """Project getter.

        :rtype: string
        :returns: The key's project.
        """
        return self._project

    def _make_parent(self):
        """Creates a parent key for the current path.

        Extracts all but the last element in the key path and creates a new
        key, while still matching the namespace and the project.

        :rtype: :class:`gcloud.datastore.key.Key` or :class:`NoneType`
        :returns: A new ``Key`` instance, whose path consists of all but the
                  last element of current path. If the current key has only
                  one path element, returns ``None``.
        """
        # A partial key's last element is a bare kind (one flat entry);
        # a complete key's last element is a (kind, id_or_name) pair.
        if self.is_partial:
            parent_args = self.flat_path[:-1]
        else:
            parent_args = self.flat_path[:-2]
        # Implicitly returns None when ``parent_args`` is empty.
        if parent_args:
            return self.__class__(*parent_args, project=self.project,
                                  namespace=self.namespace)

    @property
    def parent(self):
        """The parent of the current key.

        :rtype: :class:`gcloud.datastore.key.Key` or :class:`NoneType`
        :returns: A new ``Key`` instance, whose path consists of all but the
                  last element of current path. If the current key has only
                  one path element, returns ``None``.
        """
        # Lazily computed and cached on first access.
        if self._parent is None:
            self._parent = self._make_parent()

        return self._parent

    def __repr__(self):
        return '<Key%s, project=%s>' % (self.path, self.project)
|
||||
|
||||
|
||||
def _validate_project(project, parent):
    """Ensure the project is set appropriately.

    If ``parent`` is passed, skip the test (it will be checked / fixed up
    later).

    :type project: string
    :param project: A project.

    :type parent: :class:`gcloud.datastore.key.Key` or ``NoneType``
    :param parent: The parent of the key or ``None``.

    :rtype: string
    :returns: The ``project`` passed in (possibly ``None`` when a parent
              is given; it is reconciled with the parent later).
    :raises: :class:`ValueError` if ``project`` is ``None`` and no parent
             is given to infer it from.
    """
    # With a parent, the project is reconciled in Key._combine_args().
    if parent is not None:
        return project

    if project is None:
        raise ValueError("A Key must have a project set.")

    return project
|
531
venv/Lib/site-packages/gcloud/datastore/query.py
Normal file
531
venv/Lib/site-packages/gcloud/datastore/query.py
Normal file
|
@ -0,0 +1,531 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Create / interact with gcloud datastore queries."""
|
||||
|
||||
import base64
|
||||
|
||||
from gcloud._helpers import _ensure_tuple_or_list
|
||||
from gcloud.datastore._generated import query_pb2 as _query_pb2
|
||||
from gcloud.datastore import helpers
|
||||
from gcloud.datastore.key import Key
|
||||
|
||||
|
||||
class Query(object):
|
||||
"""A Query against the Cloud Datastore.
|
||||
|
||||
This class serves as an abstraction for creating a query over data
|
||||
stored in the Cloud Datastore.
|
||||
|
||||
:type client: :class:`gcloud.datastore.client.Client`
|
||||
:param client: The client used to connect to datastore.
|
||||
|
||||
:type kind: string
|
||||
:param kind: The kind to query.
|
||||
|
||||
:type project: string
|
||||
:param project: The project associated with the query. If not passed,
|
||||
uses the client's value.
|
||||
|
||||
:type namespace: string or None
|
||||
:param namespace: The namespace to which to restrict results. If not
|
||||
passed, uses the client's value.
|
||||
|
||||
:type ancestor: :class:`gcloud.datastore.key.Key` or None
|
||||
:param ancestor: key of the ancestor to which this query's results are
|
||||
restricted.
|
||||
|
||||
:type filters: sequence of (property_name, operator, value) tuples
|
||||
:param filters: property filters applied by this query.
|
||||
|
||||
:type projection: sequence of string
|
||||
:param projection: fields returned as part of query results.
|
||||
|
||||
:type order: sequence of string
|
||||
:param order: field names used to order query results. Prepend '-'
|
||||
to a field name to sort it in descending order.
|
||||
|
||||
:type distinct_on: sequence of string
|
||||
:param distinct_on: field names used to group query results.
|
||||
|
||||
:raises: ValueError if ``project`` is not passed and no implicit
|
||||
default is set.
|
||||
"""
|
||||
|
||||
    # Comparison operators accepted by :meth:`add_filter`, mapped to the
    # corresponding protobuf enum values.
    OPERATORS = {
        '<=': _query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL,
        '>=': _query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL,
        '<': _query_pb2.PropertyFilter.LESS_THAN,
        '>': _query_pb2.PropertyFilter.GREATER_THAN,
        '=': _query_pb2.PropertyFilter.EQUAL,
    }
    """Mapping of operator strings and their protobuf equivalents."""
|
||||
|
||||
def __init__(self,
|
||||
client,
|
||||
kind=None,
|
||||
project=None,
|
||||
namespace=None,
|
||||
ancestor=None,
|
||||
filters=(),
|
||||
projection=(),
|
||||
order=(),
|
||||
distinct_on=()):
|
||||
|
||||
self._client = client
|
||||
self._kind = kind
|
||||
self._project = project or client.project
|
||||
self._namespace = namespace or client.namespace
|
||||
self._ancestor = ancestor
|
||||
self._filters = []
|
||||
# Verify filters passed in.
|
||||
for property_name, operator, value in filters:
|
||||
self.add_filter(property_name, operator, value)
|
||||
self._projection = _ensure_tuple_or_list('projection', projection)
|
||||
self._order = _ensure_tuple_or_list('order', order)
|
||||
self._distinct_on = _ensure_tuple_or_list('distinct_on', distinct_on)
|
||||
|
||||
    @property
    def project(self):
        """Get the project for this Query.

        :rtype: str
        :returns: The query's project, falling back to the client's
                  project when unset.
        """
        # ``_project`` already defaults to the client's project in the
        # constructor; this re-checks the client as a final fallback.
        return self._project or self._client.project
|
||||
|
||||
    @property
    def namespace(self):
        """This query's namespace.

        :rtype: string or None
        :returns: the namespace assigned to this query, falling back to
                  the client's namespace when unset.
        """
        return self._namespace or self._client.namespace
|
||||
|
||||
    @namespace.setter
    def namespace(self, value):
        """Update the query's namespace.

        :type value: string
        :param value: the new namespace

        :raises: :class:`ValueError` if ``value`` is not a string.
        """
        # NOTE(review): the ``str`` check rejects ``unicode`` on Python 2
        # even though this package uses ``six.string_types`` elsewhere, and
        # raises ValueError where the sibling setters raise TypeError --
        # confirm both are intended.
        if not isinstance(value, str):
            raise ValueError("Namespace must be a string")
        self._namespace = value
|
||||
|
||||
    @property
    def kind(self):
        """Get the Kind of the Query.

        :rtype: string
        :returns: The kind to query, or ``None`` if unset.
        """
        return self._kind
|
||||
|
||||
    @kind.setter
    def kind(self, value):
        """Update the Kind of the Query.

        :type value: string
        :param value: updated kind for the query.

        :raises: :class:`TypeError` if ``value`` is not a string.

        .. note::

           The protobuf specification allows for ``kind`` to be repeated,
           but the current implementation returns an error if more than
           one value is passed. If the back-end changes in the future to
           allow multiple values, this method will be updated to allow passing
           either a string or a sequence of strings.
        """
        # NOTE(review): ``str`` rejects ``unicode`` on Python 2 -- confirm.
        if not isinstance(value, str):
            raise TypeError("Kind must be a string")
        self._kind = value
|
||||
|
||||
    @property
    def ancestor(self):
        """The ancestor key for the query.

        :rtype: Key or None
        :returns: The ancestor key restricting the query's results, if any.
        """
        return self._ancestor
|
||||
|
||||
    @ancestor.setter
    def ancestor(self, value):
        """Set the ancestor for the query.

        :type value: Key
        :param value: the new ancestor key

        :raises: :class:`TypeError` if ``value`` is not a
                 :class:`gcloud.datastore.key.Key`.
        """
        if not isinstance(value, Key):
            raise TypeError("Ancestor must be a Key")
        self._ancestor = value
|
||||
|
||||
    @ancestor.deleter
    def ancestor(self):
        """Remove the ancestor for the query."""
        self._ancestor = None
|
||||
|
||||
    @property
    def filters(self):
        """Filters set on the query.

        :rtype: sequence of (property_name, operator, value) tuples.
        :returns: A copy of the filters, so callers cannot mutate the
                  query's internal list.
        """
        # Slice copy keeps the internal list private.
        return self._filters[:]
|
||||
|
||||
def add_filter(self, property_name, operator, value):
|
||||
"""Filter the query based on a property name, operator and a value.
|
||||
|
||||
Expressions take the form of::
|
||||
|
||||
.add_filter('<property>', '<operator>', <value>)
|
||||
|
||||
where property is a property stored on the entity in the datastore
|
||||
and operator is one of ``OPERATORS``
|
||||
(ie, ``=``, ``<``, ``<=``, ``>``, ``>=``)::
|
||||
|
||||
>>> from gcloud import datastore
|
||||
>>> client = datastore.Client()
|
||||
>>> query = client.query(kind='Person')
|
||||
>>> query.add_filter('name', '=', 'James')
|
||||
>>> query.add_filter('age', '>', 50)
|
||||
|
||||
:type property_name: string
|
||||
:param property_name: A property name.
|
||||
|
||||
:type operator: string
|
||||
:param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
|
||||
|
||||
:type value: :class:`int`, :class:`str`, :class:`bool`,
|
||||
:class:`float`, :class:`NoneType`,
|
||||
:class:`datetime.datetime`,
|
||||
:class:`gcloud.datastore.key.Key`
|
||||
:param value: The value to filter on.
|
||||
|
||||
:raises: :class:`ValueError` if ``operation`` is not one of the
|
||||
specified values, or if a filter names ``'__key__'`` but
|
||||
passes an invalid value (a key is required).
|
||||
"""
|
||||
if self.OPERATORS.get(operator) is None:
|
||||
error_message = 'Invalid expression: "%s"' % (operator,)
|
||||
choices_message = 'Please use one of: =, <, <=, >, >=.'
|
||||
raise ValueError(error_message, choices_message)
|
||||
|
||||
if property_name == '__key__' and not isinstance(value, Key):
|
||||
raise ValueError('Invalid key: "%s"' % value)
|
||||
|
||||
self._filters.append((property_name, operator, value))
|
||||
|
||||
@property
def projection(self):
    """Fields names returned by the query.

    :rtype: sequence of string
    :returns: Names of fields in query results.
    """
    # Copy to keep the internal list safe from caller mutation.
    return list(self._projection)


@projection.setter
def projection(self, projection):
    """Set the fields returned the query.

    :type projection: string or sequence of strings
    :param projection: Each value is a string giving the name of a
                       property to be included in the projection query.
    """
    # Accept a bare string as shorthand for a one-element sequence.
    names = [projection] if isinstance(projection, str) else projection
    # Slice-assign to mutate in place, preserving existing references.
    self._projection[:] = names
|
||||
|
||||
def keys_only(self):
    """Set the projection to include only keys."""
    # '__key__' is the reserved property naming the entity's key.
    # Clear and append in place so existing references stay valid.
    del self._projection[:]
    self._projection.append('__key__')
|
||||
|
||||
def key_filter(self, key, operator='='):
    """Filter on a key.

    :type key: :class:`gcloud.datastore.key.Key`
    :param key: The key to filter on.

    :type operator: string
    :param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
                     Defaults to ``=``.
    """
    # Delegate to add_filter using the reserved '__key__' property name.
    self.add_filter(property_name='__key__', operator=operator, value=key)
|
||||
|
||||
@property
def order(self):
    """Names of fields used to sort query results.

    :rtype: sequence of string
    """
    # Hand back a copy; the internal list stays private.
    return list(self._order)


@order.setter
def order(self, value):
    """Set the fields used to sort query results.

    Sort fields will be applied in the order specified.

    :type value: string or sequence of strings
    :param value: Each value is a string giving the name of the
                  property on which to sort, optionally preceded by a
                  hyphen (-) to specify descending order.
                  Omitting the hyphen implies ascending order.
    """
    # A single string is promoted to a one-element list.
    fields = [value] if isinstance(value, str) else value
    self._order[:] = fields
|
||||
|
||||
@property
def distinct_on(self):
    """Names of fields used to group query results.

    :rtype: sequence of string
    """
    # Return a copy so the internal list cannot be mutated by callers.
    return list(self._distinct_on)


@distinct_on.setter
def distinct_on(self, value):
    """Set fields used to group query results.

    :type value: string or sequence of strings
    :param value: Each value is a string giving the name of a
                  property to use to group results together.
    """
    # Promote a bare string to a one-element sequence before storing.
    fields = [value] if isinstance(value, str) else value
    self._distinct_on[:] = fields
|
||||
|
||||
def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None,
          client=None):
    """Execute the Query; return an iterator for the matching entities.

    For example::

      >>> from gcloud import datastore
      >>> client = datastore.Client()
      >>> query = client.query(kind='Person')
      >>> query.add_filter('name', '=', 'Sally')
      >>> list(query.fetch())
      [<Entity object>, <Entity object>, ...]
      >>> list(query.fetch(1))
      [<Entity object>]

    :type limit: integer or None
    :param limit: An optional limit passed through to the iterator.

    :type offset: integer
    :param offset: An optional offset passed through to the iterator.

    :type start_cursor: bytes
    :param start_cursor: An optional cursor passed through to the iterator.

    :type end_cursor: bytes
    :param end_cursor: An optional cursor passed through to the iterator.

    :type client: :class:`gcloud.datastore.client.Client`
    :param client: client used to connect to datastore.
                   If not supplied, uses the query's value.

    :rtype: :class:`Iterator`
    :raises: ValueError if ``connection`` is not passed and no implicit
             default has been set.
    """
    # Fall back to the client the query was constructed with.
    effective_client = self._client if client is None else client
    return Iterator(self, effective_client, limit, offset,
                    start_cursor, end_cursor)
|
||||
|
||||
|
||||
class Iterator(object):
    """Represent the state of a given execution of a Query.

    :type query: :class:`gcloud.datastore.query.Query`
    :param query: Query object holding permanent configuration (i.e.
                  things that don't change on with each page in
                  a results set).

    :type client: :class:`gcloud.datastore.client.Client`
    :param client: The client used to make a request.

    :type limit: integer
    :param limit: (Optional) Limit the number of results returned.

    :type offset: integer
    :param offset: (Optional) Offset used to begin a query.

    :type start_cursor: bytes
    :param start_cursor: (Optional) Cursor to begin paging through
                         query results.

    :type end_cursor: bytes
    :param end_cursor: (Optional) Cursor to end paging through
                       query results.
    """

    # Enum value meaning the backend has more pages to serve.
    _NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED

    # Enum values meaning iteration is over (for different reasons).
    _FINISHED = (
        _query_pb2.QueryResultBatch.NO_MORE_RESULTS,
        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
        _query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_CURSOR,
    )

    def __init__(self, query, client, limit=None, offset=None,
                 start_cursor=None, end_cursor=None):
        self._query = query
        self._client = client
        self._limit = limit
        self._offset = offset
        self._start_cursor = start_cursor
        self._end_cursor = end_cursor
        # Populated by next_page(); None until the first page is fetched.
        self._page = self._more_results = None
        self._skipped_results = None

    def next_page(self):
        """Fetch a single "page" of query results.

        Low-level API for fine control:  the more convenient API is
        to iterate on the current Iterator.

        :rtype: tuple, (entities, more_results, cursor)
        """
        pb = _pb_from_query(self._query)

        # Cursors are stored/exposed base64-encoded; the protobuf wants
        # the raw bytes.
        start_cursor = self._start_cursor
        if start_cursor is not None:
            pb.start_cursor = base64.urlsafe_b64decode(start_cursor)

        end_cursor = self._end_cursor
        if end_cursor is not None:
            pb.end_cursor = base64.urlsafe_b64decode(end_cursor)

        if self._limit is not None:
            pb.limit.value = self._limit

        if self._offset is not None:
            pb.offset = self._offset

        # May be None when no transaction is active -- hence the ``and``
        # guard on ``transaction.id`` below.
        transaction = self._client.current_transaction

        query_results = self._client.connection.run_query(
            query_pb=pb,
            project=self._query.project,
            namespace=self._query.namespace,
            transaction_id=transaction and transaction.id,
        )
        (entity_pbs, cursor_as_bytes,
         more_results_enum, self._skipped_results) = query_results

        # An empty cursor from the backend means "no cursor"; otherwise
        # re-encode it so it round-trips through this iterator's API.
        if cursor_as_bytes == b'':
            self._start_cursor = None
        else:
            self._start_cursor = base64.urlsafe_b64encode(cursor_as_bytes)
        # The end cursor only applies to the page just fetched.
        self._end_cursor = None

        if more_results_enum == self._NOT_FINISHED:
            self._more_results = True
        elif more_results_enum in self._FINISHED:
            self._more_results = False
        else:
            raise ValueError('Unexpected value returned for `more_results`.')

        self._page = [
            helpers.entity_from_protobuf(entity)
            for entity in entity_pbs]
        return self._page, self._more_results, self._start_cursor

    def __iter__(self):
        """Generator yielding all results matching our query.

        :rtype: sequence of :class:`gcloud.datastore.entity.Entity`
        """
        while True:
            self.next_page()
            for entity in self._page:
                yield entity
            if not self._more_results:
                break
            num_results = len(self._page)
            # Shrink the remaining limit by what this page consumed.
            if self._limit is not None:
                self._limit -= num_results
            if self._offset is not None and self._skipped_results is not None:
                # NOTE: The offset goes down relative to the location
                #       because we are updating the cursor each time.
                self._offset -= self._skipped_results
|
||||
|
||||
|
||||
def _pb_from_query(query):
    """Convert a Query instance to the corresponding protobuf.

    :type query: :class:`Query`
    :param query: The source query.

    :rtype: :class:`gcloud.datastore._generated.query_pb2.Query`
    :returns: A protobuf that can be sent to the protobuf API.  N.b. that
              it does not contain "in-flight" fields for ongoing query
              executions (cursors, offset, limit).
    """
    pb = _query_pb2.Query()

    for projection_name in query.projection:
        pb.projection.add().property.name = projection_name

    if query.kind:
        pb.kind.add().name = query.kind

    # All filters (ancestor + property filters) are combined under a
    # single top-level AND composite filter.
    composite_filter = pb.filter.composite_filter
    composite_filter.op = _query_pb2.CompositeFilter.AND

    if query.ancestor:
        ancestor_pb = query.ancestor.to_protobuf()

        # Filter on __key__ HAS_ANCESTOR == ancestor.
        ancestor_filter = composite_filter.filters.add().property_filter
        ancestor_filter.property.name = '__key__'
        ancestor_filter.op = _query_pb2.PropertyFilter.HAS_ANCESTOR
        ancestor_filter.value.key_value.CopyFrom(ancestor_pb)

    for property_name, operator, value in query.filters:
        # Map the operator string (e.g. '=') to its protobuf enum value.
        pb_op_enum = query.OPERATORS.get(operator)

        # Add the specific filter
        property_filter = composite_filter.filters.add().property_filter
        property_filter.property.name = property_name
        property_filter.op = pb_op_enum

        # Set the value to filter on based on the type.
        if property_name == '__key__':
            key_pb = value.to_protobuf()
            property_filter.value.key_value.CopyFrom(key_pb)
        else:
            helpers._set_protobuf_value(property_filter.value, value)

    # If no filters were added at all, drop the empty composite filter
    # so the serialized query omits the field entirely.
    if not composite_filter.filters:
        pb.ClearField('filter')

    for prop in query.order:
        property_order = pb.order.add()

        # A leading '-' requests descending order for that property.
        if prop.startswith('-'):
            property_order.property.name = prop[1:]
            property_order.direction = property_order.DESCENDING
        else:
            property_order.property.name = prop
            property_order.direction = property_order.ASCENDING

    for distinct_on_name in query.distinct_on:
        pb.distinct_on.add().name = distinct_on_name

    return pb
|
400
venv/Lib/site-packages/gcloud/datastore/test_batch.py
Normal file
400
venv/Lib/site-packages/gcloud/datastore/test_batch.py
Normal file
|
@ -0,0 +1,400 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class TestBatch(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.batch.Batch``.

    Uses the stub classes defined below (``_Connection``, ``_Client``,
    ``_Entity``, ``_Key``) instead of real network-backed objects.
    """

    def _getTargetClass(self):
        # Import inside the helper so a broken import fails the test,
        # not the module collection.
        from gcloud.datastore.batch import Batch

        return Batch

    def _makeOne(self, client):
        # Construct a Batch bound to the given (stub) client.
        return self._getTargetClass()(client)

    def test_ctor(self):
        # A fresh batch mirrors the client's project/namespace and
        # starts with an empty commit request.
        from gcloud.datastore._generated import datastore_pb2
        _PROJECT = 'PROJECT'
        _NAMESPACE = 'NAMESPACE'
        connection = _Connection()
        client = _Client(_PROJECT, connection, _NAMESPACE)
        batch = self._makeOne(client)

        self.assertEqual(batch.project, _PROJECT)
        self.assertEqual(batch.connection, connection)
        self.assertEqual(batch.namespace, _NAMESPACE)
        self.assertTrue(batch._id is None)
        self.assertEqual(batch._status, batch._INITIAL)
        self.assertTrue(isinstance(batch._commit_request,
                                   datastore_pb2.CommitRequest))
        self.assertTrue(batch.mutations is batch._commit_request.mutations)
        self.assertEqual(batch._partial_key_entities, [])

    def test_current(self):
        # Entering a batch as a context manager makes it the client's
        # "current" batch; exiting restores the previous one.
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch1 = self._makeOne(client)
        batch2 = self._makeOne(client)
        self.assertTrue(batch1.current() is None)
        self.assertTrue(batch2.current() is None)
        with batch1:
            self.assertTrue(batch1.current() is batch1)
            self.assertTrue(batch2.current() is batch1)
            with batch2:
                self.assertTrue(batch1.current() is batch2)
                self.assertTrue(batch2.current() is batch2)
            self.assertTrue(batch1.current() is batch1)
            self.assertTrue(batch2.current() is batch1)
        self.assertTrue(batch1.current() is None)
        self.assertTrue(batch2.current() is None)

    def test_put_entity_wo_key(self):
        # put() rejects an entity that has no key at all.
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)

        self.assertRaises(ValueError, batch.put, _Entity())

    def test_put_entity_w_key_wrong_project(self):
        # put() rejects an entity whose key targets another project.
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        entity = _Entity()
        entity.key = _Key('OTHER')

        self.assertRaises(ValueError, batch.put, entity)

    def test_put_entity_w_partial_key(self):
        # A partial (id-less) key becomes an 'insert' mutation and the
        # entity is remembered so its key can be completed after commit.
        _PROJECT = 'PROJECT'
        _PROPERTIES = {'foo': 'bar'}
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        entity = _Entity(_PROPERTIES)
        key = entity.key = _Key(_PROJECT)
        key._id = None

        batch.put(entity)

        mutated_entity = _mutated_pb(self, batch.mutations, 'insert')
        self.assertEqual(mutated_entity.key, key._key)
        self.assertEqual(batch._partial_key_entities, [entity])

    def test_put_entity_w_completed_key(self):
        # A complete key becomes an 'upsert'; exclude_from_indexes is
        # honored per-property (and per-element for list values).
        from gcloud.datastore.helpers import _property_tuples

        _PROJECT = 'PROJECT'
        _PROPERTIES = {
            'foo': 'bar',
            'baz': 'qux',
            'spam': [1, 2, 3],
            'frotz': [],  # will be ignored
        }
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        entity = _Entity(_PROPERTIES)
        entity.exclude_from_indexes = ('baz', 'spam')
        key = entity.key = _Key(_PROJECT)

        batch.put(entity)

        mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
        self.assertEqual(mutated_entity.key, key._key)

        prop_dict = dict(_property_tuples(mutated_entity))
        self.assertEqual(len(prop_dict), 3)
        self.assertFalse(prop_dict['foo'].exclude_from_indexes)
        self.assertTrue(prop_dict['baz'].exclude_from_indexes)
        self.assertFalse(prop_dict['spam'].exclude_from_indexes)
        spam_values = prop_dict['spam'].array_value.values
        self.assertTrue(spam_values[0].exclude_from_indexes)
        self.assertTrue(spam_values[1].exclude_from_indexes)
        self.assertTrue(spam_values[2].exclude_from_indexes)
        self.assertFalse('frotz' in prop_dict)

    def test_delete_w_partial_key(self):
        # delete() requires a complete key.
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        key = _Key(_PROJECT)
        key._id = None

        self.assertRaises(ValueError, batch.delete, key)

    def test_delete_w_key_wrong_project(self):
        # delete() rejects a key targeting a different project.
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        key = _Key('OTHER')

        self.assertRaises(ValueError, batch.delete, key)

    def test_delete_w_completed_key(self):
        # A valid delete records a 'delete' mutation for the key.
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        key = _Key(_PROJECT)

        batch.delete(key)

        mutated_key = _mutated_pb(self, batch.mutations, 'delete')
        self.assertEqual(mutated_key, key._key)

    def test_begin(self):
        # begin() moves the batch from INITIAL to IN_PROGRESS.
        _PROJECT = 'PROJECT'
        client = _Client(_PROJECT, None)
        batch = self._makeOne(client)
        self.assertEqual(batch._status, batch._INITIAL)
        batch.begin()
        self.assertEqual(batch._status, batch._IN_PROGRESS)

    def test_begin_fail(self):
        # begin() on an already-started batch raises.
        _PROJECT = 'PROJECT'
        client = _Client(_PROJECT, None)
        batch = self._makeOne(client)
        batch._status = batch._IN_PROGRESS
        with self.assertRaises(ValueError):
            batch.begin()

    def test_rollback(self):
        # rollback() marks the batch ABORTED.
        _PROJECT = 'PROJECT'
        client = _Client(_PROJECT, None)
        batch = self._makeOne(client)
        self.assertEqual(batch._status, batch._INITIAL)
        batch.rollback()
        self.assertEqual(batch._status, batch._ABORTED)

    def test_commit(self):
        # commit() sends the accumulated request through the connection
        # and marks the batch FINISHED.
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)

        self.assertEqual(batch._status, batch._INITIAL)
        batch.commit()
        self.assertEqual(batch._status, batch._FINISHED)

        self.assertEqual(connection._committed,
                         [(_PROJECT, batch._commit_request, None)])

    def test_commit_w_partial_key_entities(self):
        # commit() completes partial keys with ids allocated by the
        # backend (simulated via _Connection(new_keys)).
        _PROJECT = 'PROJECT'
        _NEW_ID = 1234
        connection = _Connection(_NEW_ID)
        client = _Client(_PROJECT, connection)
        batch = self._makeOne(client)
        entity = _Entity({})
        key = entity.key = _Key(_PROJECT)
        key._id = None
        batch._partial_key_entities.append(entity)

        self.assertEqual(batch._status, batch._INITIAL)
        batch.commit()
        self.assertEqual(batch._status, batch._FINISHED)

        self.assertEqual(connection._committed,
                         [(_PROJECT, batch._commit_request, None)])
        self.assertFalse(entity.key.is_partial)
        self.assertEqual(entity.key._id, _NEW_ID)

    def test_as_context_mgr_wo_error(self):
        # Exiting the context manager cleanly commits the batch.
        _PROJECT = 'PROJECT'
        _PROPERTIES = {'foo': 'bar'}
        connection = _Connection()
        entity = _Entity(_PROPERTIES)
        key = entity.key = _Key(_PROJECT)

        client = _Client(_PROJECT, connection)
        self.assertEqual(list(client._batches), [])

        with self._makeOne(client) as batch:
            self.assertEqual(list(client._batches), [batch])
            batch.put(entity)

        self.assertEqual(list(client._batches), [])

        mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
        self.assertEqual(mutated_entity.key, key._key)
        self.assertEqual(connection._committed,
                         [(_PROJECT, batch._commit_request, None)])

    def test_as_context_mgr_nested(self):
        # Nested batches commit innermost-first; the batch stack on the
        # client tracks which one is current.
        _PROJECT = 'PROJECT'
        _PROPERTIES = {'foo': 'bar'}
        connection = _Connection()
        entity1 = _Entity(_PROPERTIES)
        key1 = entity1.key = _Key(_PROJECT)
        entity2 = _Entity(_PROPERTIES)
        key2 = entity2.key = _Key(_PROJECT)

        client = _Client(_PROJECT, connection)
        self.assertEqual(list(client._batches), [])

        with self._makeOne(client) as batch1:
            self.assertEqual(list(client._batches), [batch1])
            batch1.put(entity1)
            with self._makeOne(client) as batch2:
                self.assertEqual(list(client._batches), [batch2, batch1])
                batch2.put(entity2)

            self.assertEqual(list(client._batches), [batch1])

        self.assertEqual(list(client._batches), [])

        mutated_entity1 = _mutated_pb(self, batch1.mutations, 'upsert')
        self.assertEqual(mutated_entity1.key, key1._key)

        mutated_entity2 = _mutated_pb(self, batch2.mutations, 'upsert')
        self.assertEqual(mutated_entity2.key, key2._key)

        self.assertEqual(connection._committed,
                         [(_PROJECT, batch2._commit_request, None),
                          (_PROJECT, batch1._commit_request, None)])

    def test_as_context_mgr_w_error(self):
        # An exception inside the context manager prevents the commit
        # but still pops the batch off the client's stack.
        _PROJECT = 'PROJECT'
        _PROPERTIES = {'foo': 'bar'}
        connection = _Connection()
        entity = _Entity(_PROPERTIES)
        key = entity.key = _Key(_PROJECT)

        client = _Client(_PROJECT, connection)
        self.assertEqual(list(client._batches), [])

        try:
            with self._makeOne(client) as batch:
                self.assertEqual(list(client._batches), [batch])
                batch.put(entity)
                raise ValueError("testing")
        except ValueError:
            pass

        self.assertEqual(list(client._batches), [])

        mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
        self.assertEqual(mutated_entity.key, key._key)
        self.assertEqual(connection._committed, [])
|
||||
|
||||
|
||||
class _PathElementPB(object):
|
||||
|
||||
def __init__(self, id_):
|
||||
self.id = id_
|
||||
|
||||
|
||||
class _KeyPB(object):
    """Stand-in for a protobuf key with a single path element."""

    def __init__(self, id_):
        # Mirrors the real protobuf's ``path`` repeated field.
        self.path = [_PathElementPB(id_)]
|
||||
|
||||
|
||||
class _Connection(object):
|
||||
_marker = object()
|
||||
_save_result = (False, None)
|
||||
|
||||
def __init__(self, *new_keys):
|
||||
self._completed_keys = [_KeyPB(key) for key in new_keys]
|
||||
self._committed = []
|
||||
self._index_updates = 0
|
||||
|
||||
def commit(self, project, commit_request, transaction_id):
|
||||
self._committed.append((project, commit_request, transaction_id))
|
||||
return self._index_updates, self._completed_keys
|
||||
|
||||
|
||||
class _Entity(dict):
    """Minimal Entity stand-in: a dict carrying the attributes Batch reads."""
    # The entity's key (a _Key), or None for a key-less entity.
    key = None
    # Property names whose values should not be indexed.
    exclude_from_indexes = ()
    # Per-property "meaning" metadata; empty for these tests.
    _meanings = {}
|
||||
|
||||
|
||||
class _Key(object):
|
||||
_MARKER = object()
|
||||
_kind = 'KIND'
|
||||
_key = 'KEY'
|
||||
_path = None
|
||||
_id = 1234
|
||||
_stored = None
|
||||
|
||||
def __init__(self, project):
|
||||
self.project = project
|
||||
|
||||
@property
|
||||
def is_partial(self):
|
||||
return self._id is None
|
||||
|
||||
def to_protobuf(self):
|
||||
from gcloud.datastore._generated import entity_pb2
|
||||
key = self._key = entity_pb2.Key()
|
||||
# Don't assign it, because it will just get ripped out
|
||||
# key.partition_id.project_id = self.project
|
||||
|
||||
element = key.path.add()
|
||||
element.kind = self._kind
|
||||
if self._id is not None:
|
||||
element.id = self._id
|
||||
|
||||
return key
|
||||
|
||||
def completed_key(self, new_id):
|
||||
assert self.is_partial
|
||||
new_key = self.__class__(self.project)
|
||||
new_key._id = new_id
|
||||
return new_key
|
||||
|
||||
|
||||
class _Client(object):
|
||||
|
||||
def __init__(self, project, connection, namespace=None):
|
||||
self.project = project
|
||||
self.connection = connection
|
||||
self.namespace = namespace
|
||||
self._batches = []
|
||||
|
||||
def _push_batch(self, batch):
|
||||
self._batches.insert(0, batch)
|
||||
|
||||
def _pop_batch(self):
|
||||
return self._batches.pop(0)
|
||||
|
||||
@property
|
||||
def current_batch(self):
|
||||
if self._batches:
|
||||
return self._batches[0]
|
||||
|
||||
|
||||
def _assert_num_mutations(test_case, mutation_pb_list, num_mutations):
    # Fail ``test_case`` unless exactly ``num_mutations`` mutation
    # protobufs were recorded.
    test_case.assertEqual(len(mutation_pb_list), num_mutations)
|
||||
|
||||
|
||||
def _mutated_pb(test_case, mutation_pb_list, mutation_type):
    # Return the single mutated protobuf of the expected type, failing
    # the test if there is more than one mutation or the wrong kind.

    # Make sure there is only one mutation.
    _assert_num_mutations(test_case, mutation_pb_list, 1)

    # We grab the only mutation.
    mutated_pb = mutation_pb_list[0]
    # Then check if it is the correct type (protobuf oneof field name).
    test_case.assertEqual(mutated_pb.WhichOneof('operation'),
                          mutation_type)

    return getattr(mutated_pb, mutation_type)
|
1006
venv/Lib/site-packages/gcloud/datastore/test_client.py
Normal file
1006
venv/Lib/site-packages/gcloud/datastore/test_client.py
Normal file
File diff suppressed because it is too large
Load diff
873
venv/Lib/site-packages/gcloud/datastore/test_connection.py
Normal file
873
venv/Lib/site-packages/gcloud/datastore/test_connection.py
Normal file
|
@ -0,0 +1,873 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class TestConnection(unittest2.TestCase):
|
||||
|
||||
def _getTargetClass(self):
|
||||
from gcloud.datastore.connection import Connection
|
||||
|
||||
return Connection
|
||||
|
||||
def _make_key_pb(self, project, id_=1234):
|
||||
from gcloud.datastore.key import Key
|
||||
path_args = ('Kind',)
|
||||
if id_ is not None:
|
||||
path_args += (id_,)
|
||||
return Key(*path_args, project=project).to_protobuf()
|
||||
|
||||
def _make_query_pb(self, kind):
|
||||
from gcloud.datastore._generated import query_pb2
|
||||
pb = query_pb2.Query()
|
||||
pb.kind.add().name = kind
|
||||
return pb
|
||||
|
||||
def _makeOne(self, *args, **kw):
|
||||
return self._getTargetClass()(*args, **kw)
|
||||
|
||||
def _verifyProtobufCall(self, called_with, URI, conn):
|
||||
self.assertEqual(called_with['uri'], URI)
|
||||
self.assertEqual(called_with['method'], 'POST')
|
||||
self.assertEqual(called_with['headers']['Content-Type'],
|
||||
'application/x-protobuf')
|
||||
self.assertEqual(called_with['headers']['User-Agent'],
|
||||
conn.USER_AGENT)
|
||||
|
||||
def test_default_url(self):
|
||||
klass = self._getTargetClass()
|
||||
conn = self._makeOne()
|
||||
self.assertEqual(conn.api_base_url, klass.API_BASE_URL)
|
||||
|
||||
def test_custom_url_from_env(self):
|
||||
import os
|
||||
from gcloud._testing import _Monkey
|
||||
from gcloud.connection import API_BASE_URL
|
||||
from gcloud.environment_vars import GCD_HOST
|
||||
|
||||
HOST = 'CURR_HOST'
|
||||
fake_environ = {GCD_HOST: HOST}
|
||||
|
||||
with _Monkey(os, environ=fake_environ):
|
||||
conn = self._makeOne()
|
||||
|
||||
self.assertNotEqual(conn.api_base_url, API_BASE_URL)
|
||||
self.assertEqual(conn.api_base_url, HOST + '/datastore')
|
||||
|
||||
def test_custom_url_from_constructor(self):
|
||||
from gcloud.connection import API_BASE_URL
|
||||
|
||||
HOST = object()
|
||||
conn = self._makeOne(api_base_url=HOST)
|
||||
self.assertNotEqual(conn.api_base_url, API_BASE_URL)
|
||||
self.assertEqual(conn.api_base_url, HOST)
|
||||
|
||||
def test_custom_url_constructor_and_env(self):
|
||||
import os
|
||||
from gcloud._testing import _Monkey
|
||||
from gcloud.connection import API_BASE_URL
|
||||
from gcloud.environment_vars import GCD_HOST
|
||||
|
||||
HOST1 = object()
|
||||
HOST2 = object()
|
||||
fake_environ = {GCD_HOST: HOST1}
|
||||
|
||||
with _Monkey(os, environ=fake_environ):
|
||||
conn = self._makeOne(api_base_url=HOST2)
|
||||
|
||||
self.assertNotEqual(conn.api_base_url, API_BASE_URL)
|
||||
self.assertNotEqual(conn.api_base_url, HOST1)
|
||||
self.assertEqual(conn.api_base_url, HOST2)
|
||||
|
||||
def test_ctor_defaults(self):
|
||||
conn = self._makeOne()
|
||||
self.assertEqual(conn.credentials, None)
|
||||
|
||||
def test_ctor_explicit(self):
|
||||
class Creds(object):
|
||||
|
||||
def create_scoped_required(self):
|
||||
return False
|
||||
|
||||
creds = Creds()
|
||||
conn = self._makeOne(creds)
|
||||
self.assertTrue(conn.credentials is creds)
|
||||
|
||||
def test_http_w_existing(self):
|
||||
conn = self._makeOne()
|
||||
conn._http = http = object()
|
||||
self.assertTrue(conn.http is http)
|
||||
|
||||
def test_http_wo_creds(self):
|
||||
import httplib2
|
||||
|
||||
conn = self._makeOne()
|
||||
self.assertTrue(isinstance(conn.http, httplib2.Http))
|
||||
|
||||
def test_http_w_creds(self):
|
||||
import httplib2
|
||||
|
||||
authorized = object()
|
||||
|
||||
class Creds(object):
|
||||
|
||||
def authorize(self, http):
|
||||
self._called_with = http
|
||||
return authorized
|
||||
|
||||
def create_scoped_required(self):
|
||||
return False
|
||||
|
||||
creds = Creds()
|
||||
conn = self._makeOne(creds)
|
||||
self.assertTrue(conn.http is authorized)
|
||||
self.assertTrue(isinstance(creds._called_with, httplib2.Http))
|
||||
|
||||
def test__request_w_200(self):
|
||||
PROJECT = 'PROJECT'
|
||||
METHOD = 'METHOD'
|
||||
DATA = b'DATA'
|
||||
conn = self._makeOne()
|
||||
URI = '/'.join([
|
||||
conn.api_base_url,
|
||||
conn.API_VERSION,
|
||||
'projects',
|
||||
PROJECT + ':' + METHOD,
|
||||
])
|
||||
http = conn._http = Http({'status': '200'}, 'CONTENT')
|
||||
self.assertEqual(conn._request(PROJECT, METHOD, DATA), 'CONTENT')
|
||||
self._verifyProtobufCall(http._called_with, URI, conn)
|
||||
self.assertEqual(http._called_with['body'], DATA)
|
||||
|
||||
def test__request_not_200(self):
|
||||
from gcloud.exceptions import BadRequest
|
||||
from google.rpc import status_pb2
|
||||
|
||||
error = status_pb2.Status()
|
||||
error.message = 'Entity value is indexed.'
|
||||
error.code = 9 # FAILED_PRECONDITION
|
||||
|
||||
PROJECT = 'PROJECT'
|
||||
METHOD = 'METHOD'
|
||||
DATA = 'DATA'
|
||||
conn = self._makeOne()
|
||||
conn._http = Http({'status': '400'}, error.SerializeToString())
|
||||
with self.assertRaises(BadRequest) as e:
|
||||
conn._request(PROJECT, METHOD, DATA)
|
||||
expected_message = '400 Entity value is indexed.'
|
||||
self.assertEqual(str(e.exception), expected_message)
|
||||
|
||||
def test__rpc(self):
|
||||
|
||||
class ReqPB(object):
|
||||
|
||||
def SerializeToString(self):
|
||||
return REQPB
|
||||
|
||||
class RspPB(object):
|
||||
|
||||
def __init__(self, pb):
|
||||
self._pb = pb
|
||||
|
||||
@classmethod
|
||||
def FromString(cls, pb):
|
||||
return cls(pb)
|
||||
|
||||
REQPB = b'REQPB'
|
||||
PROJECT = 'PROJECT'
|
||||
METHOD = 'METHOD'
|
||||
conn = self._makeOne()
|
||||
URI = '/'.join([
|
||||
conn.api_base_url,
|
||||
conn.API_VERSION,
|
||||
'projects',
|
||||
PROJECT + ':' + METHOD,
|
||||
])
|
||||
http = conn._http = Http({'status': '200'}, 'CONTENT')
|
||||
response = conn._rpc(PROJECT, METHOD, ReqPB(), RspPB)
|
||||
self.assertTrue(isinstance(response, RspPB))
|
||||
self.assertEqual(response._pb, 'CONTENT')
|
||||
self._verifyProtobufCall(http._called_with, URI, conn)
|
||||
self.assertEqual(http._called_with['body'], REQPB)
|
||||
|
||||
def test_build_api_url_w_default_base_version(self):
|
||||
PROJECT = 'PROJECT'
|
||||
METHOD = 'METHOD'
|
||||
conn = self._makeOne()
|
||||
URI = '/'.join([
|
||||
conn.api_base_url,
|
||||
conn.API_VERSION,
|
||||
'projects',
|
||||
PROJECT + ':' + METHOD,
|
||||
])
|
||||
self.assertEqual(conn.build_api_url(PROJECT, METHOD), URI)
|
||||
|
||||
def test_build_api_url_w_explicit_base_version(self):
|
||||
BASE = 'http://example.com/'
|
||||
VER = '3.1415926'
|
||||
PROJECT = 'PROJECT'
|
||||
METHOD = 'METHOD'
|
||||
conn = self._makeOne()
|
||||
URI = '/'.join([
|
||||
BASE,
|
||||
VER,
|
||||
'projects',
|
||||
PROJECT + ':' + METHOD,
|
||||
])
|
||||
self.assertEqual(conn.build_api_url(PROJECT, METHOD, BASE, VER),
|
||||
URI)
|
||||
|
||||
def test_lookup_single_key_empty_response(self):
    """lookup() of one key against an empty response yields no results."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    key_pb = self._make_key_pb(PROJECT)
    rsp_pb = datastore_pb2.LookupResponse()
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    found, missing, deferred = conn.lookup(PROJECT, [key_pb])
    self.assertEqual(len(found), 0)
    self.assertEqual(len(missing), 0)
    self.assertEqual(len(deferred), 0)
    # Re-parse the captured request body to check the key made it through.
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.keys)
    self.assertEqual(len(keys), 1)
    self.assertEqual(key_pb, keys[0])
|
||||
|
||||
def test_lookup_single_key_empty_response_w_eventual(self):
    """eventual=True sets EVENTUAL read consistency and no transaction."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    key_pb = self._make_key_pb(PROJECT)
    rsp_pb = datastore_pb2.LookupResponse()
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    found, missing, deferred = conn.lookup(PROJECT, [key_pb],
                                           eventual=True)
    self.assertEqual(len(found), 0)
    self.assertEqual(len(missing), 0)
    self.assertEqual(len(deferred), 0)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.keys)
    self.assertEqual(len(keys), 1)
    self.assertEqual(key_pb, keys[0])
    # The eventual flag must translate into the request's read options.
    self.assertEqual(request.read_options.read_consistency,
                     datastore_pb2.ReadOptions.EVENTUAL)
    self.assertEqual(request.read_options.transaction, b'')
|
||||
|
||||
def test_lookup_single_key_empty_response_w_eventual_and_transaction(self):
    """eventual=True and a transaction id are mutually exclusive."""
    PROJECT = 'PROJECT'
    TRANSACTION = b'TRANSACTION'
    key_pb = self._make_key_pb(PROJECT)
    conn = self._makeOne()
    with self.assertRaises(ValueError):
        conn.lookup(PROJECT, key_pb,
                    eventual=True, transaction_id=TRANSACTION)
|
||||
|
||||
def test_lookup_single_key_empty_response_w_transaction(self):
    """A transaction id is propagated into the request's read options."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    TRANSACTION = b'TRANSACTION'
    key_pb = self._make_key_pb(PROJECT)
    rsp_pb = datastore_pb2.LookupResponse()
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    found, missing, deferred = conn.lookup(PROJECT, [key_pb],
                                           transaction_id=TRANSACTION)
    self.assertEqual(len(found), 0)
    self.assertEqual(len(missing), 0)
    self.assertEqual(len(deferred), 0)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.keys)
    self.assertEqual(len(keys), 1)
    self.assertEqual(key_pb, keys[0])
    self.assertEqual(request.read_options.transaction, TRANSACTION)
|
||||
|
||||
def test_lookup_single_key_nonempty_response(self):
    """A found entity is returned with its key path intact."""
    from gcloud.datastore._generated import datastore_pb2
    from gcloud.datastore._generated import entity_pb2

    PROJECT = 'PROJECT'
    key_pb = self._make_key_pb(PROJECT)
    rsp_pb = datastore_pb2.LookupResponse()
    entity = entity_pb2.Entity()
    entity.key.CopyFrom(key_pb)
    rsp_pb.found.add(entity=entity)
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    # Exactly one entity expected; unpack it directly.
    (found,), missing, deferred = conn.lookup(PROJECT, [key_pb])
    self.assertEqual(len(missing), 0)
    self.assertEqual(len(deferred), 0)
    # 'Kind'/1234 are the defaults baked into _make_key_pb.
    self.assertEqual(found.key.path[0].kind, 'Kind')
    self.assertEqual(found.key.path[0].id, 1234)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.keys)
    self.assertEqual(len(keys), 1)
    self.assertEqual(key_pb, keys[0])
|
||||
|
||||
def test_lookup_multiple_keys_empty_response(self):
    """Both requested keys appear, in order, in the lookup request body."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    key_pb1 = self._make_key_pb(PROJECT)
    key_pb2 = self._make_key_pb(PROJECT, id_=2345)
    rsp_pb = datastore_pb2.LookupResponse()
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    found, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
    self.assertEqual(len(found), 0)
    self.assertEqual(len(missing), 0)
    self.assertEqual(len(deferred), 0)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.keys)
    self.assertEqual(len(keys), 2)
    self.assertEqual(key_pb1, keys[0])
    self.assertEqual(key_pb2, keys[1])
|
||||
|
||||
def test_lookup_multiple_keys_w_missing(self):
    """Keys reported as missing come back in lookup()'s second result."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    key_pb1 = self._make_key_pb(PROJECT)
    key_pb2 = self._make_key_pb(PROJECT, id_=2345)
    rsp_pb = datastore_pb2.LookupResponse()
    # Echo both keys back in the response's 'missing' entity results.
    er_1 = rsp_pb.missing.add()
    er_1.entity.key.CopyFrom(key_pb1)
    er_2 = rsp_pb.missing.add()
    er_2.entity.key.CopyFrom(key_pb2)
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
    self.assertEqual(result, [])
    self.assertEqual(len(deferred), 0)
    self.assertEqual([missed.key for missed in missing],
                     [key_pb1, key_pb2])
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.keys)
    self.assertEqual(len(keys), 2)
    self.assertEqual(key_pb1, keys[0])
    self.assertEqual(key_pb2, keys[1])
|
||||
|
||||
def test_lookup_multiple_keys_w_deferred(self):
    """Keys echoed in ``deferred`` come back as lookup()'s third result."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    key_pb1 = self._make_key_pb(PROJECT)
    key_pb2 = self._make_key_pb(PROJECT, id_=2345)
    rsp_pb = datastore_pb2.LookupResponse()
    rsp_pb.deferred.add().CopyFrom(key_pb1)
    rsp_pb.deferred.add().CopyFrom(key_pb2)
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':lookup',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
    self.assertEqual(result, [])
    self.assertEqual(len(missing), 0)
    self.assertEqual(list(deferred), [key_pb1, key_pb2])
    cw = http._called_with
    # _verifyProtobufCall already asserts the URI, method and headers;
    # the hand-rolled duplicates of those checks were removed so this
    # test matches its sibling lookup tests.
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.LookupRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    keys = list(request.keys)
    self.assertEqual(len(keys), 2)
    self.assertEqual(key_pb1, keys[0])
    self.assertEqual(key_pb2, keys[1])
|
||||
|
||||
def test_run_query_w_eventual_no_transaction(self):
    """run_query(eventual=True) requests EVENTUAL consistency."""
    from gcloud.datastore._generated import datastore_pb2
    from gcloud.datastore._generated import query_pb2

    PROJECT = 'PROJECT'
    KIND = 'Nonesuch'
    CURSOR = b'\x00'
    q_pb = self._make_query_pb(KIND)
    rsp_pb = datastore_pb2.RunQueryResponse()
    rsp_pb.batch.end_cursor = CURSOR
    no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
    rsp_pb.batch.more_results = no_more
    rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':runQuery',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    pbs, end, more, skipped = conn.run_query(PROJECT, q_pb,
                                             eventual=True)
    self.assertEqual(pbs, [])
    self.assertEqual(end, CURSOR)
    self.assertTrue(more)
    self.assertEqual(skipped, 0)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.RunQueryRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.partition_id.namespace_id, '')
    self.assertEqual(request.query, q_pb)
    self.assertEqual(request.read_options.read_consistency,
                     datastore_pb2.ReadOptions.EVENTUAL)
    self.assertEqual(request.read_options.transaction, b'')
|
||||
|
||||
def test_run_query_wo_eventual_w_transaction(self):
    """A transaction id is forwarded and consistency left unspecified."""
    from gcloud.datastore._generated import datastore_pb2
    from gcloud.datastore._generated import query_pb2

    PROJECT = 'PROJECT'
    KIND = 'Nonesuch'
    CURSOR = b'\x00'
    TRANSACTION = b'TRANSACTION'
    q_pb = self._make_query_pb(KIND)
    rsp_pb = datastore_pb2.RunQueryResponse()
    rsp_pb.batch.end_cursor = CURSOR
    no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
    rsp_pb.batch.more_results = no_more
    rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':runQuery',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    pbs, end, more, skipped = conn.run_query(
        PROJECT, q_pb, transaction_id=TRANSACTION)
    self.assertEqual(pbs, [])
    self.assertEqual(end, CURSOR)
    self.assertTrue(more)
    self.assertEqual(skipped, 0)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.RunQueryRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.partition_id.namespace_id, '')
    self.assertEqual(request.query, q_pb)
    self.assertEqual(
        request.read_options.read_consistency,
        datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED)
    self.assertEqual(request.read_options.transaction, TRANSACTION)
|
||||
|
||||
def test_run_query_w_eventual_and_transaction(self):
    """eventual=True combined with a transaction id must raise ValueError."""
    PROJECT = 'PROJECT'
    KIND = 'Nonesuch'
    TRANSACTION = b'TRANSACTION'
    q_pb = self._make_query_pb(KIND)
    conn = self._makeOne()
    # No HTTP stub or response fixture is needed: the conflict is detected
    # before any request is issued, so the response-building setup the
    # original carried was dead code (cf. the matching lookup test).
    self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb,
                      eventual=True, transaction_id=TRANSACTION)
|
||||
|
||||
def test_run_query_wo_namespace_empty_result(self):
    """Without a namespace, the request's partition namespace is empty."""
    from gcloud.datastore._generated import datastore_pb2
    from gcloud.datastore._generated import query_pb2

    PROJECT = 'PROJECT'
    KIND = 'Nonesuch'
    CURSOR = b'\x00'
    q_pb = self._make_query_pb(KIND)
    rsp_pb = datastore_pb2.RunQueryResponse()
    rsp_pb.batch.end_cursor = CURSOR
    no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
    rsp_pb.batch.more_results = no_more
    rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':runQuery',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    pbs, end, more, skipped = conn.run_query(PROJECT, q_pb)
    self.assertEqual(pbs, [])
    self.assertEqual(end, CURSOR)
    self.assertTrue(more)
    self.assertEqual(skipped, 0)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.RunQueryRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.partition_id.namespace_id, '')
    self.assertEqual(request.query, q_pb)
|
||||
|
||||
def test_run_query_w_namespace_nonempty_result(self):
    """run_query() forwards the namespace and returns the entity batch."""
    from gcloud.datastore._generated import datastore_pb2
    from gcloud.datastore._generated import entity_pb2
    from gcloud.datastore._generated import query_pb2

    PROJECT = 'PROJECT'
    KIND = 'Kind'
    entity = entity_pb2.Entity()
    q_pb = self._make_query_pb(KIND)
    rsp_pb = datastore_pb2.RunQueryResponse()
    rsp_pb.batch.entity_results.add(entity=entity)
    # Use the named enum values instead of the bare magic numbers (1 / 3)
    # the original hard-coded, matching the sibling run_query tests.
    rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
    rsp_pb.batch.more_results = query_pb2.QueryResultBatch.NO_MORE_RESULTS
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':runQuery',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    pbs = conn.run_query(PROJECT, q_pb, 'NS')[0]
    self.assertEqual(len(pbs), 1)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.RunQueryRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.partition_id.namespace_id, 'NS')
    self.assertEqual(request.query, q_pb)
|
||||
|
||||
def test_begin_transaction(self):
    """begin_transaction() returns the transaction id from the response."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    TRANSACTION = b'TRANSACTION'
    rsp_pb = datastore_pb2.BeginTransactionResponse()
    rsp_pb.transaction = TRANSACTION
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':beginTransaction',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.BeginTransactionRequest
    request = rq_class()
    # No field assertions follow: parsing the captured body is itself the
    # check that a well-formed BeginTransactionRequest was sent.
    request.ParseFromString(cw['body'])
|
||||
|
||||
def test_commit_wo_transaction(self):
    """commit() without a transaction uses NON_TRANSACTIONAL mode."""
    from gcloud._testing import _Monkey
    from gcloud.datastore._generated import datastore_pb2
    from gcloud.datastore import connection as MUT
    from gcloud.datastore.helpers import _new_value_pb

    PROJECT = 'PROJECT'
    key_pb = self._make_key_pb(PROJECT)
    rsp_pb = datastore_pb2.CommitResponse()
    req_pb = datastore_pb2.CommitRequest()
    mutation = req_pb.mutations.add()
    insert = mutation.upsert
    insert.key.CopyFrom(key_pb)
    value_pb = _new_value_pb(insert, 'foo')
    value_pb.string_value = u'Foo'
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':commit',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())

    # Set up mock for parsing the response.
    expected_result = object()
    _parsed = []

    def mock_parse(response):
        _parsed.append(response)
        return expected_result

    # Patch the module-level response parser so we can verify commit()
    # hands it the decoded response and returns its result unchanged.
    with _Monkey(MUT, _parse_commit_response=mock_parse):
        result = conn.commit(PROJECT, req_pb, None)

    self.assertTrue(result is expected_result)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.CommitRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.transaction, b'')
    self.assertEqual(list(request.mutations), [mutation])
    self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
    self.assertEqual(_parsed, [rsp_pb])
|
||||
|
||||
def test_commit_w_transaction(self):
    """commit() with a transaction id uses TRANSACTIONAL mode."""
    from gcloud._testing import _Monkey
    from gcloud.datastore._generated import datastore_pb2
    from gcloud.datastore import connection as MUT
    from gcloud.datastore.helpers import _new_value_pb

    PROJECT = 'PROJECT'
    key_pb = self._make_key_pb(PROJECT)
    rsp_pb = datastore_pb2.CommitResponse()
    req_pb = datastore_pb2.CommitRequest()
    mutation = req_pb.mutations.add()
    insert = mutation.upsert
    insert.key.CopyFrom(key_pb)
    value_pb = _new_value_pb(insert, 'foo')
    value_pb.string_value = u'Foo'
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':commit',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())

    # Set up mock for parsing the response.
    expected_result = object()
    _parsed = []

    def mock_parse(response):
        _parsed.append(response)
        return expected_result

    # Patch the module-level response parser so we can verify commit()
    # hands it the decoded response and returns its result unchanged.
    with _Monkey(MUT, _parse_commit_response=mock_parse):
        result = conn.commit(PROJECT, req_pb, b'xact')

    self.assertTrue(result is expected_result)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.CommitRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.transaction, b'xact')
    self.assertEqual(list(request.mutations), [mutation])
    self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
    self.assertEqual(_parsed, [rsp_pb])
|
||||
|
||||
def test_rollback_ok(self):
    """rollback() returns None and sends the transaction id in the body."""
    from gcloud.datastore._generated import datastore_pb2
    PROJECT = 'PROJECT'
    TRANSACTION = b'xact'

    rsp_pb = datastore_pb2.RollbackResponse()
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':rollback',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    self.assertEqual(conn.rollback(PROJECT, TRANSACTION), None)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.RollbackRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(request.transaction, TRANSACTION)
|
||||
|
||||
def test_allocate_ids_empty(self):
    """allocate_ids() with no keys sends an empty request, returns []."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    rsp_pb = datastore_pb2.AllocateIdsResponse()
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':allocateIds',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    self.assertEqual(conn.allocate_ids(PROJECT, []), [])
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.AllocateIdsRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(list(request.keys), [])
|
||||
|
||||
def test_allocate_ids_non_empty(self):
    """Partial keys go in the request; completed keys come back out."""
    from gcloud.datastore._generated import datastore_pb2

    PROJECT = 'PROJECT'
    # Partial keys (no ids) to be completed by the service.
    before_key_pbs = [
        self._make_key_pb(PROJECT, id_=None),
        self._make_key_pb(PROJECT, id_=None),
    ]
    # Completed keys the stubbed service hands back.
    after_key_pbs = [
        self._make_key_pb(PROJECT),
        self._make_key_pb(PROJECT, id_=2345),
    ]
    rsp_pb = datastore_pb2.AllocateIdsResponse()
    rsp_pb.keys.add().CopyFrom(after_key_pbs[0])
    rsp_pb.keys.add().CopyFrom(after_key_pbs[1])
    conn = self._makeOne()
    URI = '/'.join([
        conn.api_base_url,
        conn.API_VERSION,
        'projects',
        PROJECT + ':allocateIds',
    ])
    http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
    self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs),
                     after_key_pbs)
    cw = http._called_with
    self._verifyProtobufCall(cw, URI, conn)
    rq_class = datastore_pb2.AllocateIdsRequest
    request = rq_class()
    request.ParseFromString(cw['body'])
    self.assertEqual(len(request.keys), len(before_key_pbs))
    for key_before, key_after in zip(before_key_pbs, request.keys):
        self.assertEqual(key_before, key_after)
|
||||
|
||||
|
||||
class Test__parse_commit_response(unittest2.TestCase):
    """Tests for the module-level ``_parse_commit_response`` helper."""

    def _callFUT(self, commit_response_pb):
        # Import inside the helper so collection works without the package.
        from gcloud.datastore.connection import _parse_commit_response
        return _parse_commit_response(commit_response_pb)

    def test_it(self):
        """The response reduces to ``(index_updates, [mutation keys])``."""
        from gcloud.datastore._generated import datastore_pb2
        from gcloud.datastore._generated import entity_pb2

        index_updates = 1337
        # One id-based key and one name-based key, to cover both forms.
        keys = [
            entity_pb2.Key(
                path=[
                    entity_pb2.Key.PathElement(
                        kind='Foo',
                        id=1234,
                    ),
                ],
            ),
            entity_pb2.Key(
                path=[
                    entity_pb2.Key.PathElement(
                        kind='Bar',
                        name='baz',
                    ),
                ],
            ),
        ]
        response = datastore_pb2.CommitResponse(
            mutation_results=[
                datastore_pb2.MutationResult(key=key) for key in keys
            ],
            index_updates=index_updates,
        )
        result = self._callFUT(response)
        self.assertEqual(result, (index_updates, keys))
|
||||
|
||||
|
||||
class Http(object):
    """Stub for ``httplib2.Http`` that records the last request's kwargs."""

    # Keyword arguments of the most recent request() call (None before any).
    _called_with = None

    def __init__(self, headers, content):
        from httplib2 import Response
        # Canned (response, content) pair returned by every request().
        self._response = Response(headers)
        self._content = content

    def request(self, **kw):
        """Record *kw* for later inspection and return the canned reply."""
        self._called_with = kw
        return self._response, self._content
|
||||
|
||||
|
||||
class _PathElementProto(object):
    """Stand-in for a key path-element protobuf exposing only ``id``."""

    def __init__(self, _id):
        self.id = _id
|
||||
|
||||
|
||||
class _KeyProto(object):
    """Stand-in for a key protobuf with a single path element."""

    def __init__(self, id_):
        self.path = [_PathElementProto(id_)]
|
211
venv/Lib/site-packages/gcloud/datastore/test_entity.py
Normal file
211
venv/Lib/site-packages/gcloud/datastore/test_entity.py
Normal file
|
@ -0,0 +1,211 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
_PROJECT = 'PROJECT'
|
||||
_KIND = 'KIND'
|
||||
_ID = 1234
|
||||
|
||||
|
||||
class TestEntity(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.entity.Entity``."""

    def _getTargetClass(self):
        # Deferred import keeps collection working without the package.
        from gcloud.datastore.entity import Entity
        return Entity

    def _makeOne(self, key=None, exclude_from_indexes=()):
        """Construct an Entity under test."""
        klass = self._getTargetClass()
        return klass(key=key, exclude_from_indexes=exclude_from_indexes)

    def test_ctor_defaults(self):
        """A bare Entity has no key, no kind, and no index exclusions."""
        klass = self._getTargetClass()
        entity = klass()
        self.assertEqual(entity.key, None)
        self.assertEqual(entity.kind, None)
        self.assertEqual(sorted(entity.exclude_from_indexes), [])

    def test_ctor_explicit(self):
        """exclude_from_indexes is stored (order-insensitively)."""
        _EXCLUDE_FROM_INDEXES = ['foo', 'bar']
        key = _Key()
        entity = self._makeOne(
            key=key, exclude_from_indexes=_EXCLUDE_FROM_INDEXES)
        self.assertEqual(sorted(entity.exclude_from_indexes),
                         sorted(_EXCLUDE_FROM_INDEXES))

    def test_ctor_bad_exclude_from_indexes(self):
        """A non-iterable exclude_from_indexes raises TypeError."""
        BAD_EXCLUDE_FROM_INDEXES = object()
        key = _Key()
        self.assertRaises(TypeError, self._makeOne, key=key,
                          exclude_from_indexes=BAD_EXCLUDE_FROM_INDEXES)

    def test___eq_____ne___w_non_entity(self):
        """An Entity never compares equal to a non-Entity."""
        from gcloud.datastore.key import Key
        key = Key(_KIND, _ID, project=_PROJECT)
        entity = self._makeOne(key=key)
        self.assertFalse(entity == object())
        self.assertTrue(entity != object())

    def test___eq_____ne___w_different_keys(self):
        """Entities with different keys are unequal."""
        from gcloud.datastore.key import Key
        _ID1 = 1234
        _ID2 = 2345
        key1 = Key(_KIND, _ID1, project=_PROJECT)
        entity1 = self._makeOne(key=key1)
        key2 = Key(_KIND, _ID2, project=_PROJECT)
        entity2 = self._makeOne(key=key2)
        self.assertFalse(entity1 == entity2)
        self.assertTrue(entity1 != entity2)

    def test___eq_____ne___w_same_keys(self):
        """Same key, props, meanings and exclusions compare equal."""
        from gcloud.datastore.key import Key

        name = 'foo'
        value = 42
        meaning = 9

        key1 = Key(_KIND, _ID, project=_PROJECT)
        entity1 = self._makeOne(key=key1, exclude_from_indexes=(name,))
        entity1[name] = value
        entity1._meanings[name] = (meaning, value)

        key2 = Key(_KIND, _ID, project=_PROJECT)
        entity2 = self._makeOne(key=key2, exclude_from_indexes=(name,))
        entity2[name] = value
        entity2._meanings[name] = (meaning, value)

        self.assertTrue(entity1 == entity2)
        self.assertFalse(entity1 != entity2)

    def test___eq_____ne___w_same_keys_different_props(self):
        """Same keys but differing properties compare unequal."""
        from gcloud.datastore.key import Key
        key1 = Key(_KIND, _ID, project=_PROJECT)
        entity1 = self._makeOne(key=key1)
        entity1['foo'] = 'Foo'
        key2 = Key(_KIND, _ID, project=_PROJECT)
        entity2 = self._makeOne(key=key2)
        # NOTE(review): both props land on entity1 and entity2 stays empty —
        # presumably intentional (any difference suffices); confirm.
        entity1['bar'] = 'Bar'
        self.assertFalse(entity1 == entity2)
        self.assertTrue(entity1 != entity2)

    def test___eq_____ne___w_same_keys_props_w_equiv_keys_as_value(self):
        """Equivalent (but distinct) Key values keep entities equal."""
        from gcloud.datastore.key import Key
        key1 = Key(_KIND, _ID, project=_PROJECT)
        key2 = Key(_KIND, _ID, project=_PROJECT)
        entity1 = self._makeOne(key=key1)
        entity1['some_key'] = key1
        entity2 = self._makeOne(key=key1)
        entity2['some_key'] = key2
        self.assertTrue(entity1 == entity2)
        self.assertFalse(entity1 != entity2)

    def test___eq_____ne___w_same_keys_props_w_diff_keys_as_value(self):
        """Different Key values as properties make entities unequal."""
        from gcloud.datastore.key import Key
        _ID1 = 1234
        _ID2 = 2345
        key1 = Key(_KIND, _ID1, project=_PROJECT)
        key2 = Key(_KIND, _ID2, project=_PROJECT)
        entity1 = self._makeOne(key=key1)
        entity1['some_key'] = key1
        entity2 = self._makeOne(key=key1)
        entity2['some_key'] = key2
        self.assertFalse(entity1 == entity2)
        self.assertTrue(entity1 != entity2)

    def test___eq_____ne___w_same_keys_props_w_equiv_entities_as_value(self):
        """Equivalent sub-entities as values keep entities equal."""
        from gcloud.datastore.key import Key
        key = Key(_KIND, _ID, project=_PROJECT)
        entity1 = self._makeOne(key=key)
        sub1 = self._makeOne()
        sub1.update({'foo': 'Foo'})
        entity1['some_entity'] = sub1
        entity2 = self._makeOne(key=key)
        sub2 = self._makeOne()
        sub2.update({'foo': 'Foo'})
        entity2['some_entity'] = sub2
        self.assertTrue(entity1 == entity2)
        self.assertFalse(entity1 != entity2)

    def test___eq_____ne___w_same_keys_props_w_diff_entities_as_value(self):
        """Differing sub-entities as values make entities unequal."""
        from gcloud.datastore.key import Key
        key = Key(_KIND, _ID, project=_PROJECT)
        entity1 = self._makeOne(key=key)
        sub1 = self._makeOne()
        sub1.update({'foo': 'Foo'})
        entity1['some_entity'] = sub1
        entity2 = self._makeOne(key=key)
        sub2 = self._makeOne()
        sub2.update({'foo': 'Bar'})
        entity2['some_entity'] = sub2
        self.assertFalse(entity1 == entity2)
        self.assertTrue(entity1 != entity2)

    def test__eq__same_value_different_exclude(self):
        """Differing exclude_from_indexes makes otherwise-equal unequal."""
        from gcloud.datastore.key import Key

        name = 'foo'
        value = 42
        key = Key(_KIND, _ID, project=_PROJECT)

        entity1 = self._makeOne(key=key, exclude_from_indexes=(name,))
        entity1[name] = value

        entity2 = self._makeOne(key=key, exclude_from_indexes=())
        entity2[name] = value

        self.assertFalse(entity1 == entity2)

    def test__eq__same_value_different_meanings(self):
        """Differing _meanings makes otherwise-equal entities unequal."""
        from gcloud.datastore.key import Key

        name = 'foo'
        value = 42
        meaning = 9
        key = Key(_KIND, _ID, project=_PROJECT)

        entity1 = self._makeOne(key=key, exclude_from_indexes=(name,))
        entity1[name] = value

        entity2 = self._makeOne(key=key, exclude_from_indexes=(name,))
        entity2[name] = value
        entity2._meanings[name] = (meaning, value)

        self.assertFalse(entity1 == entity2)

    def test___repr___no_key_empty(self):
        """repr() of an empty keyless Entity is '<Entity {}>'."""
        entity = self._makeOne()
        self.assertEqual(repr(entity), '<Entity {}>')

    def test___repr___w_key_non_empty(self):
        """repr() includes the key path and the property dict."""
        key = _Key()
        key._path = '/bar/baz'
        entity = self._makeOne(key=key)
        entity['foo'] = 'Foo'
        self.assertEqual(repr(entity), "<Entity/bar/baz {'foo': 'Foo'}>")
|
||||
|
||||
|
||||
class _Key(object):
    """Minimal stand-in for ``gcloud.datastore.key.Key`` used above."""

    # Class-level defaults that individual tests override per instance.
    _MARKER = object()
    _key = 'KEY'
    _partial = False
    _path = None
    _id = None
    _stored = None

    def __init__(self, project=_PROJECT):
        self.project = project

    @property
    def path(self):
        # Tests set self._path directly (e.g. the __repr__ test).
        return self._path
|
926
venv/Lib/site-packages/gcloud/datastore/test_helpers.py
Normal file
926
venv/Lib/site-packages/gcloud/datastore/test_helpers.py
Normal file
|
@ -0,0 +1,926 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class Test__new_value_pb(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers._new_value_pb``."""

    def _callFUT(self, entity_pb, name):
        from gcloud.datastore.helpers import _new_value_pb
        return _new_value_pb(entity_pb, name)

    def test_it(self):
        from gcloud.datastore._generated import entity_pb2

        pb = entity_pb2.Entity()
        prop_name = 'foo'
        value_pb = self._callFUT(pb, prop_name)

        # A fresh Value protobuf must come back, and it must be the one
        # registered under the requested property name on the entity.
        self.assertTrue(isinstance(value_pb, entity_pb2.Value))
        self.assertEqual(len(pb.properties), 1)
        self.assertEqual(pb.properties[prop_name], value_pb)
||||
|
||||
|
||||
class Test__property_tuples(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers._property_tuples``."""

    def _callFUT(self, entity_pb):
        from gcloud.datastore.helpers import _property_tuples
        return _property_tuples(entity_pb)

    def test_it(self):
        import types
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.helpers import _new_value_pb

        pb = entity_pb2.Entity()
        foo_name = 'foo'
        bar_name = 'bar'
        foo_value_pb = _new_value_pb(pb, foo_name)
        bar_value_pb = _new_value_pb(pb, bar_name)

        pairs = self._callFUT(pb)
        # The helper is lazy: it yields (name, value_pb) pairs in
        # unspecified order, so compare after sorting.
        self.assertTrue(isinstance(pairs, types.GeneratorType))
        expected = sorted([(foo_name, foo_value_pb),
                           (bar_name, bar_value_pb)])
        self.assertEqual(sorted(pairs), expected)
|
||||
|
||||
|
||||
class Test_entity_from_protobuf(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers.entity_from_protobuf``."""

    def _callFUT(self, val):
        from gcloud.datastore.helpers import entity_from_protobuf
        return entity_from_protobuf(val)

    def test_it(self):
        """A populated Entity protobuf converts with key, properties,
        and exclude_from_indexes all carried across."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.helpers import _new_value_pb

        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _ID = 1234
        entity_pb = entity_pb2.Entity()
        entity_pb.key.partition_id.project_id = _PROJECT
        entity_pb.key.path.add(kind=_KIND, id=_ID)

        # Indexed scalar property.
        value_pb = _new_value_pb(entity_pb, 'foo')
        value_pb.string_value = 'Foo'

        # Unindexed scalar property.
        unindexed_val_pb = _new_value_pb(entity_pb, 'bar')
        unindexed_val_pb.integer_value = 10
        unindexed_val_pb.exclude_from_indexes = True

        # Array property whose (single) element is unindexed.
        array_val_pb1 = _new_value_pb(entity_pb, 'baz')
        array_pb1 = array_val_pb1.array_value.values

        unindexed_array_val_pb = array_pb1.add()
        unindexed_array_val_pb.integer_value = 11
        unindexed_array_val_pb.exclude_from_indexes = True

        # Array property whose (single) element is indexed.
        array_val_pb2 = _new_value_pb(entity_pb, 'qux')
        array_pb2 = array_val_pb2.array_value.values

        indexed_array_val_pb = array_pb2.add()
        indexed_array_val_pb.integer_value = 12

        entity = self._callFUT(entity_pb)
        self.assertEqual(entity.kind, _KIND)
        self.assertEqual(entity.exclude_from_indexes,
                         frozenset(['bar', 'baz']))
        entity_props = dict(entity)
        self.assertEqual(entity_props,
                         {'foo': 'Foo', 'bar': 10, 'baz': [11], 'qux': [12]})

        # Also check the key.
        key = entity.key
        self.assertEqual(key.project, _PROJECT)
        self.assertEqual(key.namespace, None)
        self.assertEqual(key.kind, _KIND)
        self.assertEqual(key.id, _ID)

    def test_mismatched_value_indexed(self):
        """An array whose elements disagree on exclude_from_indexes
        must raise ValueError."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.helpers import _new_value_pb

        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _ID = 1234
        entity_pb = entity_pb2.Entity()
        entity_pb.key.partition_id.project_id = _PROJECT
        entity_pb.key.path.add(kind=_KIND, id=_ID)

        array_val_pb = _new_value_pb(entity_pb, 'baz')
        array_pb = array_val_pb.array_value.values

        # First element excluded from indexes ...
        unindexed_value_pb1 = array_pb.add()
        unindexed_value_pb1.integer_value = 10
        unindexed_value_pb1.exclude_from_indexes = True

        # ... second element indexed (the default): inconsistent.
        unindexed_value_pb2 = array_pb.add()
        unindexed_value_pb2.integer_value = 11

        with self.assertRaises(ValueError):
            self._callFUT(entity_pb)

    def test_entity_no_key(self):
        """An empty protobuf yields an entity with no key and no props."""
        from gcloud.datastore._generated import entity_pb2

        entity_pb = entity_pb2.Entity()
        entity = self._callFUT(entity_pb)

        self.assertEqual(entity.key, None)
        self.assertEqual(dict(entity), {})

    def test_entity_with_meaning(self):
        """A value's ``meaning`` field is preserved in ``_meanings``."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.helpers import _new_value_pb

        entity_pb = entity_pb2.Entity()
        name = 'hello'
        value_pb = _new_value_pb(entity_pb, name)
        value_pb.meaning = meaning = 9
        value_pb.string_value = val = u'something'

        entity = self._callFUT(entity_pb)
        self.assertEqual(entity.key, None)
        self.assertEqual(dict(entity), {name: val})
        self.assertEqual(entity._meanings, {name: (meaning, val)})

    def test_nested_entity_no_key(self):
        """An entity-valued property converts to a keyless sub-entity."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.helpers import _new_value_pb

        PROJECT = 'FOO'
        KIND = 'KIND'
        INSIDE_NAME = 'IFOO'
        OUTSIDE_NAME = 'OBAR'
        INSIDE_VALUE = 1337

        # Inner entity has a property but no key of its own.
        entity_inside = entity_pb2.Entity()
        inside_val_pb = _new_value_pb(entity_inside, INSIDE_NAME)
        inside_val_pb.integer_value = INSIDE_VALUE

        entity_pb = entity_pb2.Entity()
        entity_pb.key.partition_id.project_id = PROJECT
        element = entity_pb.key.path.add()
        element.kind = KIND

        outside_val_pb = _new_value_pb(entity_pb, OUTSIDE_NAME)
        outside_val_pb.entity_value.CopyFrom(entity_inside)

        entity = self._callFUT(entity_pb)
        self.assertEqual(entity.key.project, PROJECT)
        self.assertEqual(entity.key.flat_path, (KIND,))
        self.assertEqual(len(entity), 1)

        inside_entity = entity[OUTSIDE_NAME]
        self.assertEqual(inside_entity.key, None)
        self.assertEqual(len(inside_entity), 1)
        self.assertEqual(inside_entity[INSIDE_NAME], INSIDE_VALUE)
||||
|
||||
|
||||
class Test_entity_to_protobuf(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers.entity_to_protobuf``."""

    def _callFUT(self, entity):
        from gcloud.datastore.helpers import entity_to_protobuf
        return entity_to_protobuf(entity)

    def _compareEntityProto(self, entity_pb1, entity_pb2):
        """Assert two Entity protobufs are equivalent.

        Compares keys directly, then property lists sorted by name;
        recurses into entity-valued properties so nested entities are
        compared structurally rather than byte-for-byte.
        """
        from gcloud.datastore.helpers import _property_tuples

        self.assertEqual(entity_pb1.key, entity_pb2.key)
        value_list1 = sorted(_property_tuples(entity_pb1))
        value_list2 = sorted(_property_tuples(entity_pb2))
        self.assertEqual(len(value_list1), len(value_list2))
        for pair1, pair2 in zip(value_list1, value_list2):
            name1, val1 = pair1
            name2, val2 = pair2
            self.assertEqual(name1, name2)
            if val1.HasField('entity_value'):  # Message field (Entity)
                self.assertEqual(val1.meaning, val2.meaning)
                self._compareEntityProto(val1.entity_value,
                                         val2.entity_value)
            else:
                self.assertEqual(val1, val2)

    def test_empty(self):
        """An empty Entity converts to an empty protobuf."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.entity import Entity

        entity = Entity()
        entity_pb = self._callFUT(entity)
        self._compareEntityProto(entity_pb, entity_pb2.Entity())

    def test_key_only(self):
        """A key-only Entity converts to a protobuf with just the key."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.entity import Entity
        from gcloud.datastore.key import Key

        kind, name = 'PATH', 'NAME'
        project = 'PROJECT'
        key = Key(kind, name, project=project)
        entity = Entity(key=key)
        entity_pb = self._callFUT(entity)

        expected_pb = entity_pb2.Entity()
        expected_pb.key.partition_id.project_id = project
        path_elt = expected_pb.key.path.add()
        path_elt.kind = kind
        path_elt.name = name

        self._compareEntityProto(entity_pb, expected_pb)

    def test_simple_fields(self):
        """Scalar int and unicode properties round-trip into values."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.entity import Entity
        from gcloud.datastore.helpers import _new_value_pb

        entity = Entity()
        name1 = 'foo'
        entity[name1] = value1 = 42
        name2 = 'bar'
        entity[name2] = value2 = u'some-string'
        entity_pb = self._callFUT(entity)

        expected_pb = entity_pb2.Entity()
        val_pb1 = _new_value_pb(expected_pb, name1)
        val_pb1.integer_value = value1
        val_pb2 = _new_value_pb(expected_pb, name2)
        val_pb2.string_value = value2

        self._compareEntityProto(entity_pb, expected_pb)

    def test_with_empty_list(self):
        """An empty-list property is dropped from the protobuf."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.entity import Entity

        entity = Entity()
        entity['foo'] = []
        entity_pb = self._callFUT(entity)

        self._compareEntityProto(entity_pb, entity_pb2.Entity())

    def test_inverts_to_protobuf(self):
        """entity_to_protobuf inverts entity_from_protobuf, including
        meanings, exclude_from_indexes, nested entities, and arrays."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.helpers import _new_value_pb
        from gcloud.datastore.helpers import entity_from_protobuf

        original_pb = entity_pb2.Entity()
        # Add a key.
        original_pb.key.partition_id.project_id = project = 'PROJECT'
        elem1 = original_pb.key.path.add()
        elem1.kind = 'Family'
        elem1.id = 1234
        elem2 = original_pb.key.path.add()
        elem2.kind = 'King'
        elem2.name = 'Spades'

        # Add an integer property.
        val_pb1 = _new_value_pb(original_pb, 'foo')
        val_pb1.integer_value = 1337
        val_pb1.exclude_from_indexes = True
        # Add a string property.
        val_pb2 = _new_value_pb(original_pb, 'bar')
        val_pb2.string_value = u'hello'

        # Add a nested (entity) property.
        val_pb3 = _new_value_pb(original_pb, 'entity-baz')
        sub_pb = entity_pb2.Entity()
        sub_val_pb1 = _new_value_pb(sub_pb, 'x')
        sub_val_pb1.double_value = 3.14
        sub_val_pb2 = _new_value_pb(sub_pb, 'y')
        sub_val_pb2.double_value = 2.718281828
        val_pb3.meaning = 9
        val_pb3.entity_value.CopyFrom(sub_pb)

        # Add a list property.
        val_pb4 = _new_value_pb(original_pb, 'list-quux')
        array_val1 = val_pb4.array_value.values.add()
        array_val1.exclude_from_indexes = False
        array_val1.meaning = meaning = 22
        array_val1.blob_value = b'\xe2\x98\x83'
        array_val2 = val_pb4.array_value.values.add()
        array_val2.exclude_from_indexes = False
        array_val2.meaning = meaning
        array_val2.blob_value = b'\xe2\x98\x85'

        # Convert to the user-space Entity.
        entity = entity_from_protobuf(original_pb)
        # Convert the user-space Entity back to a protobuf.
        new_pb = self._callFUT(entity)

        # NOTE: entity_to_protobuf() strips the project so we "cheat".
        new_pb.key.partition_id.project_id = project
        self._compareEntityProto(original_pb, new_pb)

    def test_meaning_with_change(self):
        """A stored meaning is dropped when the value has since changed."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.entity import Entity
        from gcloud.datastore.helpers import _new_value_pb

        entity = Entity()
        name = 'foo'
        entity[name] = value = 42
        # Meaning recorded against a stale value (1337 != 42).
        entity._meanings[name] = (9, 1337)
        entity_pb = self._callFUT(entity)

        expected_pb = entity_pb2.Entity()
        value_pb = _new_value_pb(expected_pb, name)
        value_pb.integer_value = value
        # NOTE: No meaning is used since the value differs from the
        #       value stored.
        self._compareEntityProto(entity_pb, expected_pb)

    def test_variable_meanings(self):
        """Per-element meanings on an array property are applied only to
        the elements that carry one."""
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.entity import Entity
        from gcloud.datastore.helpers import _new_value_pb

        entity = Entity()
        name = 'quux'
        entity[name] = values = [1, 20, 300]
        meaning = 9
        entity._meanings[name] = ([None, meaning, None], values)
        entity_pb = self._callFUT(entity)

        # Construct the expected protobuf.
        expected_pb = entity_pb2.Entity()
        value_pb = _new_value_pb(expected_pb, name)
        value0 = value_pb.array_value.values.add()
        value0.integer_value = values[0]
        # The only array entry with a meaning is the middle one.
        value1 = value_pb.array_value.values.add()
        value1.integer_value = values[1]
        value1.meaning = meaning
        value2 = value_pb.array_value.values.add()
        value2.integer_value = values[2]

        self._compareEntityProto(entity_pb, expected_pb)
|
||||
|
||||
|
||||
class Test_key_from_protobuf(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers.key_from_protobuf``."""

    def _callFUT(self, val):
        from gcloud.datastore.helpers import key_from_protobuf

        return key_from_protobuf(val)

    def _makePB(self, project=None, namespace=None, path=()):
        """Build a Key protobuf from optional project/namespace and a
        path given as dicts with 'kind' and optional 'id'/'name'."""
        from gcloud.datastore._generated import entity_pb2

        key_pb = entity_pb2.Key()
        if project is not None:
            key_pb.partition_id.project_id = project
        if namespace is not None:
            key_pb.partition_id.namespace_id = namespace
        for element in path:
            elem_pb = key_pb.path.add()
            elem_pb.kind = element['kind']
            if 'id' in element:
                elem_pb.id = element['id']
            if 'name' in element:
                elem_pb.name = element['name']
        return key_pb

    def test_wo_namespace_in_pb(self):
        _PROJECT = 'PROJECT'
        key = self._callFUT(
            self._makePB(path=[{'kind': 'KIND'}], project=_PROJECT))
        self.assertEqual(key.project, _PROJECT)
        self.assertEqual(key.namespace, None)

    def test_w_namespace_in_pb(self):
        _PROJECT = 'PROJECT'
        _NAMESPACE = 'NAMESPACE'
        key = self._callFUT(
            self._makePB(path=[{'kind': 'KIND'}], namespace=_NAMESPACE,
                         project=_PROJECT))
        self.assertEqual(key.project, _PROJECT)
        self.assertEqual(key.namespace, _NAMESPACE)

    def test_w_nested_path_in_pb(self):
        # Three-level ancestor path survives the conversion intact.
        _PATH = [
            {'kind': 'PARENT', 'name': 'NAME'},
            {'kind': 'CHILD', 'id': 1234},
            {'kind': 'GRANDCHILD', 'id': 5678},
        ]
        key = self._callFUT(self._makePB(path=_PATH, project='PROJECT'))
        self.assertEqual(key.path, _PATH)

    def test_w_nothing_in_pb(self):
        # A completely empty Key protobuf is invalid.
        self.assertRaises(ValueError, self._callFUT, self._makePB())
|
||||
|
||||
|
||||
class Test__pb_attr_value(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers._pb_attr_value``.

    The helper maps a Python value to a ``(field_name, value)`` pair
    matching the Value protobuf oneof; each test covers one input type.
    """

    def _callFUT(self, val):
        from gcloud.datastore.helpers import _pb_attr_value

        return _pb_attr_value(val)

    def test_datetime_naive(self):
        # A naive datetime is interpreted as UTC.
        import calendar
        import datetime
        from gcloud._helpers import UTC

        micros = 4375
        naive = datetime.datetime(2014, 9, 16, 10, 19, 32, micros)  # No zone.
        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
        name, value = self._callFUT(naive)
        self.assertEqual(name, 'timestamp_value')
        self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
        self.assertEqual(value.nanos, 1000 * micros)

    def test_datetime_w_zone(self):
        import calendar
        import datetime
        from gcloud._helpers import UTC

        micros = 4375
        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
        name, value = self._callFUT(utc)
        self.assertEqual(name, 'timestamp_value')
        self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
        self.assertEqual(value.nanos, 1000 * micros)

    def test_key(self):
        from gcloud.datastore.key import Key

        key = Key('PATH', 1234, project='PROJECT')
        name, value = self._callFUT(key)
        self.assertEqual(name, 'key_value')
        self.assertEqual(value, key.to_protobuf())

    def test_bool(self):
        name, value = self._callFUT(False)
        self.assertEqual(name, 'boolean_value')
        self.assertEqual(value, False)

    def test_float(self):
        name, value = self._callFUT(3.1415926)
        self.assertEqual(name, 'double_value')
        self.assertEqual(value, 3.1415926)

    def test_int(self):
        name, value = self._callFUT(42)
        self.assertEqual(name, 'integer_value')
        self.assertEqual(value, 42)

    def test_long(self):
        # Largest 64-bit signed value still maps to integer_value.
        must_be_long = (1 << 63) - 1
        name, value = self._callFUT(must_be_long)
        self.assertEqual(name, 'integer_value')
        self.assertEqual(value, must_be_long)

    def test_native_str(self):
        # Native ``str`` is bytes on Python 2 and text on Python 3,
        # so the chosen field differs by interpreter.
        import six
        name, value = self._callFUT('str')
        if six.PY2:
            self.assertEqual(name, 'blob_value')
        else:  # pragma: NO COVER Python 3
            self.assertEqual(name, 'string_value')
        self.assertEqual(value, 'str')

    def test_bytes(self):
        name, value = self._callFUT(b'bytes')
        self.assertEqual(name, 'blob_value')
        self.assertEqual(value, b'bytes')

    def test_unicode(self):
        name, value = self._callFUT(u'str')
        self.assertEqual(name, 'string_value')
        self.assertEqual(value, u'str')

    def test_entity(self):
        # Entities pass through unconverted (converted later).
        from gcloud.datastore.entity import Entity
        entity = Entity()
        name, value = self._callFUT(entity)
        self.assertEqual(name, 'entity_value')
        self.assertTrue(value is entity)

    def test_array(self):
        # Lists also pass through unconverted.
        values = ['a', 0, 3.14]
        name, value = self._callFUT(values)
        self.assertEqual(name, 'array_value')
        self.assertTrue(value is values)

    def test_geo_point(self):
        from google.type import latlng_pb2
        from gcloud.datastore.helpers import GeoPoint

        lat = 42.42
        lng = 99.0007
        geo_pt = GeoPoint(latitude=lat, longitude=lng)
        geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
        name, value = self._callFUT(geo_pt)
        self.assertEqual(name, 'geo_point_value')
        self.assertEqual(value, geo_pt_pb)

    def test_null(self):
        from google.protobuf import struct_pb2

        name, value = self._callFUT(None)
        self.assertEqual(name, 'null_value')
        self.assertEqual(value, struct_pb2.NULL_VALUE)

    def test_object(self):
        # Unsupported types are rejected.
        self.assertRaises(ValueError, self._callFUT, object())
|
||||
|
||||
|
||||
class Test__get_value_from_value_pb(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers._get_value_from_value_pb``.

    The helper unpacks a Value protobuf oneof back into the
    corresponding Python value; each test covers one field.
    """

    def _callFUT(self, pb):
        from gcloud.datastore.helpers import _get_value_from_value_pb

        return _get_value_from_value_pb(pb)

    def _makePB(self, attr_name, value):
        """Build a Value protobuf with a single scalar field set."""
        from gcloud.datastore._generated import entity_pb2

        pb = entity_pb2.Value()
        setattr(pb, attr_name, value)
        return pb

    def test_datetime(self):
        import calendar
        import datetime
        from gcloud._helpers import UTC
        from gcloud.datastore._generated import entity_pb2

        micros = 4375
        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
        pb = entity_pb2.Value()
        pb.timestamp_value.seconds = calendar.timegm(utc.timetuple())
        pb.timestamp_value.nanos = 1000 * micros
        self.assertEqual(self._callFUT(pb), utc)

    def test_key(self):
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.key import Key

        pb = entity_pb2.Value()
        expected = Key('KIND', 1234, project='PROJECT').to_protobuf()
        pb.key_value.CopyFrom(expected)
        found = self._callFUT(pb)
        # Compare via protobuf since Key itself has custom equality.
        self.assertEqual(found.to_protobuf(), expected)

    def test_bool(self):
        pb = self._makePB('boolean_value', False)
        self.assertEqual(self._callFUT(pb), False)

    def test_float(self):
        pb = self._makePB('double_value', 3.1415926)
        self.assertEqual(self._callFUT(pb), 3.1415926)

    def test_int(self):
        pb = self._makePB('integer_value', 42)
        self.assertEqual(self._callFUT(pb), 42)

    def test_bytes(self):
        pb = self._makePB('blob_value', b'str')
        self.assertEqual(self._callFUT(pb), b'str')

    def test_unicode(self):
        pb = self._makePB('string_value', u'str')
        self.assertEqual(self._callFUT(pb), u'str')

    def test_entity(self):
        # An entity_value unpacks into a full Entity.
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.entity import Entity
        from gcloud.datastore.helpers import _new_value_pb

        pb = entity_pb2.Value()
        entity_pb = pb.entity_value
        entity_pb.key.path.add(kind='KIND')
        entity_pb.key.partition_id.project_id = 'PROJECT'

        value_pb = _new_value_pb(entity_pb, 'foo')
        value_pb.string_value = 'Foo'
        entity = self._callFUT(pb)
        self.assertTrue(isinstance(entity, Entity))
        self.assertEqual(entity['foo'], 'Foo')

    def test_array(self):
        # An array_value unpacks into a list of unpacked elements.
        from gcloud.datastore._generated import entity_pb2

        pb = entity_pb2.Value()
        array_pb = pb.array_value.values
        item_pb = array_pb.add()
        item_pb.string_value = 'Foo'
        item_pb = array_pb.add()
        item_pb.string_value = 'Bar'
        items = self._callFUT(pb)
        self.assertEqual(items, ['Foo', 'Bar'])

    def test_geo_point(self):
        from google.type import latlng_pb2
        from gcloud.datastore._generated import entity_pb2
        from gcloud.datastore.helpers import GeoPoint

        lat = -3.14
        lng = 13.37
        geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
        pb = entity_pb2.Value(geo_point_value=geo_pt_pb)
        result = self._callFUT(pb)
        self.assertIsInstance(result, GeoPoint)
        self.assertEqual(result.latitude, lat)
        self.assertEqual(result.longitude, lng)

    def test_null(self):
        from google.protobuf import struct_pb2
        from gcloud.datastore._generated import entity_pb2

        pb = entity_pb2.Value(null_value=struct_pb2.NULL_VALUE)
        result = self._callFUT(pb)
        self.assertIsNone(result)

    def test_unknown(self):
        # A Value with no oneof field set is rejected.
        from gcloud.datastore._generated import entity_pb2

        pb = entity_pb2.Value()
        with self.assertRaises(ValueError):
            self._callFUT(pb)
|
||||
|
||||
|
||||
class Test_set_protobuf_value(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers._set_protobuf_value``.

    The helper writes a Python value into the matching oneof field of a
    Value protobuf, in place; each test covers one input type.
    """

    def _callFUT(self, value_pb, val):
        from gcloud.datastore.helpers import _set_protobuf_value

        return _set_protobuf_value(value_pb, val)

    def _makePB(self):
        """Return a fresh, empty Value protobuf."""
        from gcloud.datastore._generated import entity_pb2
        return entity_pb2.Value()

    def test_datetime(self):
        import calendar
        import datetime
        from gcloud._helpers import UTC

        pb = self._makePB()
        micros = 4375
        utc = datetime.datetime(2014, 9, 16, 10, 19, 32, micros, UTC)
        self._callFUT(pb, utc)
        value = pb.timestamp_value
        self.assertEqual(value.seconds, calendar.timegm(utc.timetuple()))
        self.assertEqual(value.nanos, 1000 * micros)

    def test_key(self):
        from gcloud.datastore.key import Key

        pb = self._makePB()
        key = Key('KIND', 1234, project='PROJECT')
        self._callFUT(pb, key)
        value = pb.key_value
        self.assertEqual(value, key.to_protobuf())

    def test_none(self):
        pb = self._makePB()
        self._callFUT(pb, None)
        self.assertEqual(pb.WhichOneof('value_type'), 'null_value')

    def test_bool(self):
        pb = self._makePB()
        self._callFUT(pb, False)
        value = pb.boolean_value
        self.assertEqual(value, False)

    def test_float(self):
        pb = self._makePB()
        self._callFUT(pb, 3.1415926)
        value = pb.double_value
        self.assertEqual(value, 3.1415926)

    def test_int(self):
        pb = self._makePB()
        self._callFUT(pb, 42)
        value = pb.integer_value
        self.assertEqual(value, 42)

    def test_long(self):
        # Largest 64-bit signed value still fits integer_value.
        pb = self._makePB()
        must_be_long = (1 << 63) - 1
        self._callFUT(pb, must_be_long)
        value = pb.integer_value
        self.assertEqual(value, must_be_long)

    def test_native_str(self):
        # Native ``str`` lands in blob_value on Python 2,
        # string_value on Python 3.
        import six
        pb = self._makePB()
        self._callFUT(pb, 'str')
        if six.PY2:
            value = pb.blob_value
        else:  # pragma: NO COVER Python 3
            value = pb.string_value
        self.assertEqual(value, 'str')

    def test_bytes(self):
        pb = self._makePB()
        self._callFUT(pb, b'str')
        value = pb.blob_value
        self.assertEqual(value, b'str')

    def test_unicode(self):
        pb = self._makePB()
        self._callFUT(pb, u'str')
        value = pb.string_value
        self.assertEqual(value, u'str')

    def test_entity_empty_wo_key(self):
        # An empty, keyless Entity serializes to an empty entity_value.
        from gcloud.datastore.entity import Entity
        from gcloud.datastore.helpers import _property_tuples

        pb = self._makePB()
        entity = Entity()
        self._callFUT(pb, entity)
        value = pb.entity_value
        self.assertEqual(value.key.SerializeToString(), b'')
        self.assertEqual(len(list(_property_tuples(value))), 0)

    def test_entity_w_key(self):
        # An Entity with a key serializes key and properties.
        from gcloud.datastore.entity import Entity
        from gcloud.datastore.helpers import _property_tuples
        from gcloud.datastore.key import Key

        name = 'foo'
        value = u'Foo'
        pb = self._makePB()
        key = Key('KIND', 123, project='PROJECT')
        entity = Entity(key=key)
        entity[name] = value
        self._callFUT(pb, entity)
        entity_pb = pb.entity_value
        self.assertEqual(entity_pb.key, key.to_protobuf())

        prop_dict = dict(_property_tuples(entity_pb))
        self.assertEqual(len(prop_dict), 1)
        self.assertEqual(list(prop_dict.keys()), [name])
        self.assertEqual(prop_dict[name].string_value, value)

    def test_array(self):
        # Each list element is marshalled into its own typed sub-value.
        pb = self._makePB()
        values = [u'a', 0, 3.14]
        self._callFUT(pb, values)
        marshalled = pb.array_value.values
        self.assertEqual(len(marshalled), len(values))
        self.assertEqual(marshalled[0].string_value, values[0])
        self.assertEqual(marshalled[1].integer_value, values[1])
        self.assertEqual(marshalled[2].double_value, values[2])

    def test_geo_point(self):
        from google.type import latlng_pb2
        from gcloud.datastore.helpers import GeoPoint

        pb = self._makePB()
        lat = 9.11
        lng = 3.337
        geo_pt = GeoPoint(latitude=lat, longitude=lng)
        geo_pt_pb = latlng_pb2.LatLng(latitude=lat, longitude=lng)
        self._callFUT(pb, geo_pt)
        self.assertEqual(pb.geo_point_value, geo_pt_pb)
|
||||
|
||||
|
||||
class Test__get_meaning(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers._get_meaning``.

    The helper extracts the ``meaning`` of a Value protobuf; for array
    values (``is_list=True``) it collapses per-element meanings to a
    single value when uniform, or keeps a list when they differ.
    """

    def _callFUT(self, *args, **kwargs):
        from gcloud.datastore.helpers import _get_meaning
        return _get_meaning(*args, **kwargs)

    def test_no_meaning(self):
        from gcloud.datastore._generated import entity_pb2

        value_pb = entity_pb2.Value()
        result = self._callFUT(value_pb)
        self.assertEqual(result, None)

    def test_single(self):
        from gcloud.datastore._generated import entity_pb2

        value_pb = entity_pb2.Value()
        value_pb.meaning = meaning = 22
        value_pb.string_value = u'hi'
        result = self._callFUT(value_pb)
        self.assertEqual(meaning, result)

    def test_empty_array_value(self):
        from gcloud.datastore._generated import entity_pb2

        value_pb = entity_pb2.Value()
        # Add then pop so array_value is set but holds no elements.
        value_pb.array_value.values.add()
        value_pb.array_value.values.pop()

        result = self._callFUT(value_pb, is_list=True)
        self.assertEqual(None, result)

    def test_array_value(self):
        # Uniform per-element meanings collapse to a single value.
        from gcloud.datastore._generated import entity_pb2

        value_pb = entity_pb2.Value()
        meaning = 9
        sub_value_pb1 = value_pb.array_value.values.add()
        sub_value_pb2 = value_pb.array_value.values.add()

        sub_value_pb1.meaning = sub_value_pb2.meaning = meaning
        sub_value_pb1.string_value = u'hi'
        sub_value_pb2.string_value = u'bye'

        result = self._callFUT(value_pb, is_list=True)
        self.assertEqual(meaning, result)

    def test_array_value_multiple_meanings(self):
        # Differing meanings are returned as a per-element list.
        from gcloud.datastore._generated import entity_pb2

        value_pb = entity_pb2.Value()
        meaning1 = 9
        meaning2 = 10
        sub_value_pb1 = value_pb.array_value.values.add()
        sub_value_pb2 = value_pb.array_value.values.add()

        sub_value_pb1.meaning = meaning1
        sub_value_pb2.meaning = meaning2
        sub_value_pb1.string_value = u'hi'
        sub_value_pb2.string_value = u'bye'

        result = self._callFUT(value_pb, is_list=True)
        self.assertEqual(result, [meaning1, meaning2])

    def test_array_value_meaning_partially_unset(self):
        # Elements with no meaning appear as None in the list.
        from gcloud.datastore._generated import entity_pb2

        value_pb = entity_pb2.Value()
        meaning1 = 9
        sub_value_pb1 = value_pb.array_value.values.add()
        sub_value_pb2 = value_pb.array_value.values.add()

        sub_value_pb1.meaning = meaning1
        sub_value_pb1.string_value = u'hi'
        sub_value_pb2.string_value = u'bye'

        result = self._callFUT(value_pb, is_list=True)
        self.assertEqual(result, [meaning1, None])
|
||||
|
||||
|
||||
class TestGeoPoint(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.helpers.GeoPoint``."""

    def _getTargetClass(self):
        from gcloud.datastore.helpers import GeoPoint
        return GeoPoint

    def _makeOne(self, *args, **kwargs):
        return self._getTargetClass()(*args, **kwargs)

    def test_constructor(self):
        latitude, longitude = 81.2, 359.9999
        point = self._makeOne(latitude, longitude)
        self.assertEqual(point.latitude, latitude)
        self.assertEqual(point.longitude, longitude)

    def test_to_protobuf(self):
        from google.type import latlng_pb2

        latitude, longitude = 0.0001, 20.03
        point = self._makeOne(latitude, longitude)
        expected = latlng_pb2.LatLng(latitude=latitude,
                                     longitude=longitude)
        self.assertEqual(point.to_protobuf(), expected)

    def test___eq__(self):
        # Two points with identical coordinates compare equal.
        latitude, longitude = 0.0001, 20.03
        self.assertEqual(self._makeOne(latitude, longitude),
                         self._makeOne(latitude, longitude))

    def test___eq__type_differ(self):
        # A GeoPoint never equals a non-GeoPoint.
        point = self._makeOne(0.0001, 20.03)
        self.assertNotEqual(point, object())

    def test___ne__same_value(self):
        # __ne__ is the strict inverse of __eq__ for equal points.
        latitude, longitude = 0.0001, 20.03
        first = self._makeOne(latitude, longitude)
        second = self._makeOne(latitude, longitude)
        self.assertFalse(first != second)

    def test___ne__(self):
        self.assertNotEqual(self._makeOne(0.0, 1.0),
                            self._makeOne(2.0, 3.0))
|
431
venv/Lib/site-packages/gcloud/datastore/test_key.py
Normal file
431
venv/Lib/site-packages/gcloud/datastore/test_key.py
Normal file
|
@ -0,0 +1,431 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class TestKey(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.key.Key``.

    Covers constructor validation, parent/child path inheritance, cloning,
    equality and hashing semantics, protobuf conversion, and the derived
    ``is_partial`` / ``id_or_name`` / ``parent`` properties.
    """

    _DEFAULT_PROJECT = 'PROJECT'

    def _getTargetClass(self):
        # Deferred import so an import error in the module under test
        # surfaces as a test failure rather than a collection error.
        from gcloud.datastore.key import Key
        return Key

    def _makeOne(self, *args, **kwargs):
        # Convenience factory for the class under test.
        return self._getTargetClass()(*args, **kwargs)

    def test_ctor_empty(self):
        # A key with no path elements at all is invalid.
        self.assertRaises(ValueError, self._makeOne)

    def test_ctor_no_project(self):
        # A project is required when none can be inherited from a parent.
        klass = self._getTargetClass()
        self.assertRaises(ValueError, klass, 'KIND')

    def test_ctor_w_explicit_project_empty_path(self):
        # A project alone is not enough: the path must be non-empty.
        _PROJECT = 'PROJECT'
        self.assertRaises(ValueError, self._makeOne, project=_PROJECT)

    def test_ctor_parent(self):
        # A child key inherits project and namespace from its parent,
        # and its path is the parent's path plus its own element.
        _PARENT_KIND = 'KIND1'
        _PARENT_ID = 1234
        _PARENT_PROJECT = 'PROJECT-ALT'
        _PARENT_NAMESPACE = 'NAMESPACE'
        _CHILD_KIND = 'KIND2'
        _CHILD_ID = 2345
        _PATH = [
            {'kind': _PARENT_KIND, 'id': _PARENT_ID},
            {'kind': _CHILD_KIND, 'id': _CHILD_ID},
        ]
        parent_key = self._makeOne(_PARENT_KIND, _PARENT_ID,
                                   project=_PARENT_PROJECT,
                                   namespace=_PARENT_NAMESPACE)
        key = self._makeOne(_CHILD_KIND, _CHILD_ID, parent=parent_key)
        self.assertEqual(key.project, parent_key.project)
        self.assertEqual(key.namespace, parent_key.namespace)
        self.assertEqual(key.kind, _CHILD_KIND)
        self.assertEqual(key.path, _PATH)
        self.assertTrue(key.parent is parent_key)

    def test_ctor_partial_parent(self):
        # A partial (incomplete) key cannot serve as a parent.
        parent_key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
        with self.assertRaises(ValueError):
            self._makeOne('KIND2', 1234, parent=parent_key)

    def test_ctor_parent_bad_type(self):
        # A non-Key parent (here a tuple) fails when its attributes
        # are accessed.
        with self.assertRaises(AttributeError):
            self._makeOne('KIND2', 1234, parent=('KIND1', 1234),
                          project=self._DEFAULT_PROJECT)

    def test_ctor_parent_bad_namespace(self):
        # Child namespace must agree with the parent's namespace.
        parent_key = self._makeOne('KIND', 1234, namespace='FOO',
                                   project=self._DEFAULT_PROJECT)
        with self.assertRaises(ValueError):
            self._makeOne('KIND2', 1234, namespace='BAR', parent=parent_key,
                          project=self._DEFAULT_PROJECT)

    def test_ctor_parent_bad_project(self):
        # Child project must agree with the parent's project.
        parent_key = self._makeOne('KIND', 1234, project='FOO')
        with self.assertRaises(ValueError):
            self._makeOne('KIND2', 1234, parent=parent_key,
                          project='BAR')

    def test_ctor_parent_empty_path(self):
        # A parent does not excuse the child from supplying its own
        # path element.
        parent_key = self._makeOne('KIND', 1234,
                                   project=self._DEFAULT_PROJECT)
        with self.assertRaises(ValueError):
            self._makeOne(parent=parent_key)

    def test_ctor_explicit(self):
        _PROJECT = 'PROJECT-ALT'
        _NAMESPACE = 'NAMESPACE'
        _KIND = 'KIND'
        _ID = 1234
        _PATH = [{'kind': _KIND, 'id': _ID}]
        key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE,
                            project=_PROJECT)
        self.assertEqual(key.project, _PROJECT)
        self.assertEqual(key.namespace, _NAMESPACE)
        self.assertEqual(key.kind, _KIND)
        self.assertEqual(key.path, _PATH)

    def test_ctor_bad_kind(self):
        # Kind must be a string, not an arbitrary object.
        self.assertRaises(ValueError, self._makeOne, object(),
                          project=self._DEFAULT_PROJECT)

    def test_ctor_bad_id_or_name(self):
        # An ID/name must be an integer or string; None is only allowed
        # implicitly as a trailing partial element, never passed explicitly.
        self.assertRaises(ValueError, self._makeOne, 'KIND', object(),
                          project=self._DEFAULT_PROJECT)
        self.assertRaises(ValueError, self._makeOne, 'KIND', None,
                          project=self._DEFAULT_PROJECT)
        self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None,
                          project=self._DEFAULT_PROJECT)

    def test__clone(self):
        # A clone carries over project, namespace, kind and path.
        _PROJECT = 'PROJECT-ALT'
        _NAMESPACE = 'NAMESPACE'
        _KIND = 'KIND'
        _ID = 1234
        _PATH = [{'kind': _KIND, 'id': _ID}]
        key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE,
                            project=_PROJECT)
        clone = key._clone()
        self.assertEqual(clone.project, _PROJECT)
        self.assertEqual(clone.namespace, _NAMESPACE)
        self.assertEqual(clone.kind, _KIND)
        self.assertEqual(clone.path, _PATH)

    def test__clone_with_parent(self):
        # Cloning shares (does not copy) the parent key object.
        _PROJECT = 'PROJECT-ALT'
        _NAMESPACE = 'NAMESPACE'
        _KIND1 = 'PARENT'
        _KIND2 = 'KIND'
        _ID1 = 1234
        _ID2 = 2345
        _PATH = [{'kind': _KIND1, 'id': _ID1}, {'kind': _KIND2, 'id': _ID2}]

        parent = self._makeOne(_KIND1, _ID1, namespace=_NAMESPACE,
                               project=_PROJECT)
        key = self._makeOne(_KIND2, _ID2, parent=parent)
        self.assertTrue(key.parent is parent)
        clone = key._clone()
        self.assertTrue(clone.parent is key.parent)
        self.assertEqual(clone.project, _PROJECT)
        self.assertEqual(clone.namespace, _NAMESPACE)
        self.assertEqual(clone.path, _PATH)

    def test___eq_____ne___w_non_key(self):
        # Comparison against a non-Key is always unequal.
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _NAME = 'one'
        key = self._makeOne(_KIND, _NAME, project=_PROJECT)
        self.assertFalse(key == object())
        self.assertTrue(key != object())

    def test___eq_____ne___two_incomplete_keys_same_kind(self):
        # Two partial keys are never equal, even with identical fields.
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        key1 = self._makeOne(_KIND, project=_PROJECT)
        key2 = self._makeOne(_KIND, project=_PROJECT)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___eq_____ne___incomplete_key_w_complete_key_same_kind(self):
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _ID = 1234
        key1 = self._makeOne(_KIND, project=_PROJECT)
        key2 = self._makeOne(_KIND, _ID, project=_PROJECT)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___eq_____ne___complete_key_w_incomplete_key_same_kind(self):
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _ID = 1234
        key1 = self._makeOne(_KIND, _ID, project=_PROJECT)
        key2 = self._makeOne(_KIND, project=_PROJECT)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___eq_____ne___same_kind_different_ids(self):
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _ID1 = 1234
        _ID2 = 2345
        key1 = self._makeOne(_KIND, _ID1, project=_PROJECT)
        key2 = self._makeOne(_KIND, _ID2, project=_PROJECT)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___eq_____ne___same_kind_and_id(self):
        # Complete keys with identical fields compare equal.
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _ID = 1234
        key1 = self._makeOne(_KIND, _ID, project=_PROJECT)
        key2 = self._makeOne(_KIND, _ID, project=_PROJECT)
        self.assertTrue(key1 == key2)
        self.assertFalse(key1 != key2)

    def test___eq_____ne___same_kind_and_id_different_project(self):
        # Project participates in equality.
        _PROJECT1 = 'PROJECT1'
        _PROJECT2 = 'PROJECT2'
        _KIND = 'KIND'
        _ID = 1234
        key1 = self._makeOne(_KIND, _ID, project=_PROJECT1)
        key2 = self._makeOne(_KIND, _ID, project=_PROJECT2)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___eq_____ne___same_kind_and_id_different_namespace(self):
        # Namespace participates in equality.
        _PROJECT = 'PROJECT'
        _NAMESPACE1 = 'NAMESPACE1'
        _NAMESPACE2 = 'NAMESPACE2'
        _KIND = 'KIND'
        _ID = 1234
        key1 = self._makeOne(_KIND, _ID, project=_PROJECT,
                             namespace=_NAMESPACE1)
        key2 = self._makeOne(_KIND, _ID, project=_PROJECT,
                             namespace=_NAMESPACE2)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___eq_____ne___same_kind_different_names(self):
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _NAME1 = 'one'
        _NAME2 = 'two'
        key1 = self._makeOne(_KIND, _NAME1, project=_PROJECT)
        key2 = self._makeOne(_KIND, _NAME2, project=_PROJECT)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___eq_____ne___same_kind_and_name(self):
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _NAME = 'one'
        key1 = self._makeOne(_KIND, _NAME, project=_PROJECT)
        key2 = self._makeOne(_KIND, _NAME, project=_PROJECT)
        self.assertTrue(key1 == key2)
        self.assertFalse(key1 != key2)

    def test___eq_____ne___same_kind_and_name_different_project(self):
        _PROJECT1 = 'PROJECT1'
        _PROJECT2 = 'PROJECT2'
        _KIND = 'KIND'
        _NAME = 'one'
        key1 = self._makeOne(_KIND, _NAME, project=_PROJECT1)
        key2 = self._makeOne(_KIND, _NAME, project=_PROJECT2)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___eq_____ne___same_kind_and_name_different_namespace(self):
        _PROJECT = 'PROJECT'
        _NAMESPACE1 = 'NAMESPACE1'
        _NAMESPACE2 = 'NAMESPACE2'
        _KIND = 'KIND'
        _NAME = 'one'
        key1 = self._makeOne(_KIND, _NAME, project=_PROJECT,
                             namespace=_NAMESPACE1)
        key2 = self._makeOne(_KIND, _NAME, project=_PROJECT,
                             namespace=_NAMESPACE2)
        self.assertFalse(key1 == key2)
        self.assertTrue(key1 != key2)

    def test___hash___incomplete(self):
        # Hash must not be a naive sum of the component hashes
        # (which would make distinct keys collide too easily).
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        key = self._makeOne(_KIND, project=_PROJECT)
        self.assertNotEqual(hash(key),
                            hash(_KIND) + hash(_PROJECT) + hash(None))

    def test___hash___completed_w_id(self):
        # Same guard for a key completed with a numeric ID.
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _ID = 1234
        key = self._makeOne(_KIND, _ID, project=_PROJECT)
        self.assertNotEqual(hash(key),
                            hash(_KIND) + hash(_ID) +
                            hash(_PROJECT) + hash(None))

    def test___hash___completed_w_name(self):
        # Same guard for a key completed with a string name.
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _NAME = 'NAME'
        key = self._makeOne(_KIND, _NAME, project=_PROJECT)
        self.assertNotEqual(hash(key),
                            hash(_KIND) + hash(_NAME) +
                            hash(_PROJECT) + hash(None))

    def test_completed_key_on_partial_w_id(self):
        # completed_key returns a NEW key; the original stays partial.
        key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
        _ID = 1234
        new_key = key.completed_key(_ID)
        self.assertFalse(key is new_key)
        self.assertEqual(new_key.id, _ID)
        self.assertEqual(new_key.name, None)

    def test_completed_key_on_partial_w_name(self):
        key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
        _NAME = 'NAME'
        new_key = key.completed_key(_NAME)
        self.assertFalse(key is new_key)
        self.assertEqual(new_key.id, None)
        self.assertEqual(new_key.name, _NAME)

    def test_completed_key_on_partial_w_invalid(self):
        # Only an int ID or string name may complete a key.
        key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
        self.assertRaises(ValueError, key.completed_key, object())

    def test_completed_key_on_complete(self):
        # Completing an already-complete key is an error.
        key = self._makeOne('KIND', 1234, project=self._DEFAULT_PROJECT)
        self.assertRaises(ValueError, key.completed_key, 5678)

    def test_to_protobuf_defaults(self):
        from gcloud.datastore._generated import entity_pb2

        _KIND = 'KIND'
        key = self._makeOne(_KIND, project=self._DEFAULT_PROJECT)
        pb = key.to_protobuf()
        self.assertTrue(isinstance(pb, entity_pb2.Key))

        # Check partition ID.
        self.assertEqual(pb.partition_id.project_id, self._DEFAULT_PROJECT)
        # Unset values are False-y.
        self.assertEqual(pb.partition_id.namespace_id, '')

        # Check the element PB matches the partial key and kind.
        elem, = list(pb.path)
        self.assertEqual(elem.kind, _KIND)
        # Unset values are False-y.
        self.assertEqual(elem.name, '')
        # Unset values are False-y.
        self.assertEqual(elem.id, 0)

    def test_to_protobuf_w_explicit_project(self):
        _PROJECT = 'PROJECT-ALT'
        key = self._makeOne('KIND', project=_PROJECT)
        pb = key.to_protobuf()
        self.assertEqual(pb.partition_id.project_id, _PROJECT)

    def test_to_protobuf_w_explicit_namespace(self):
        _NAMESPACE = 'NAMESPACE'
        key = self._makeOne('KIND', namespace=_NAMESPACE,
                            project=self._DEFAULT_PROJECT)
        pb = key.to_protobuf()
        self.assertEqual(pb.partition_id.namespace_id, _NAMESPACE)

    def test_to_protobuf_w_explicit_path(self):
        # A two-element (parent/child) path serializes both elements
        # in order, preserving name vs. id per element.
        _PARENT = 'PARENT'
        _CHILD = 'CHILD'
        _ID = 1234
        _NAME = 'NAME'
        key = self._makeOne(_PARENT, _NAME, _CHILD, _ID,
                            project=self._DEFAULT_PROJECT)
        pb = key.to_protobuf()
        elems = list(pb.path)
        self.assertEqual(len(elems), 2)
        self.assertEqual(elems[0].kind, _PARENT)
        self.assertEqual(elems[0].name, _NAME)
        self.assertEqual(elems[1].kind, _CHILD)
        self.assertEqual(elems[1].id, _ID)

    def test_to_protobuf_w_no_kind(self):
        key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
        # Force the 'kind' to be unset. Maybe `to_protobuf` should fail
        # on this? The backend certainly will.
        key._path[-1].pop('kind')
        pb = key.to_protobuf()
        # Unset values are False-y.
        self.assertEqual(pb.path[0].kind, '')

    def test_is_partial_no_name_or_id(self):
        key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
        self.assertTrue(key.is_partial)

    def test_is_partial_w_id(self):
        _ID = 1234
        key = self._makeOne('KIND', _ID, project=self._DEFAULT_PROJECT)
        self.assertFalse(key.is_partial)

    def test_is_partial_w_name(self):
        _NAME = 'NAME'
        key = self._makeOne('KIND', _NAME, project=self._DEFAULT_PROJECT)
        self.assertFalse(key.is_partial)

    def test_id_or_name_no_name_or_id(self):
        key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
        self.assertEqual(key.id_or_name, None)

    def test_id_or_name_no_name_or_id_child(self):
        # Only the LAST path element determines id_or_name.
        key = self._makeOne('KIND1', 1234, 'KIND2',
                            project=self._DEFAULT_PROJECT)
        self.assertEqual(key.id_or_name, None)

    def test_id_or_name_w_id_only(self):
        _ID = 1234
        key = self._makeOne('KIND', _ID, project=self._DEFAULT_PROJECT)
        self.assertEqual(key.id_or_name, _ID)

    def test_id_or_name_w_name_only(self):
        _NAME = 'NAME'
        key = self._makeOne('KIND', _NAME, project=self._DEFAULT_PROJECT)
        self.assertEqual(key.id_or_name, _NAME)

    def test_parent_default(self):
        key = self._makeOne('KIND', project=self._DEFAULT_PROJECT)
        self.assertEqual(key.parent, None)

    def test_parent_explicit_top_level(self):
        key = self._makeOne('KIND', 1234, project=self._DEFAULT_PROJECT)
        self.assertEqual(key.parent, None)

    def test_parent_explicit_nested(self):
        # A multi-element path implies a synthesized parent key.
        _PARENT_KIND = 'KIND1'
        _PARENT_ID = 1234
        _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}]
        key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2',
                            project=self._DEFAULT_PROJECT)
        self.assertEqual(key.parent.path, _PARENT_PATH)

    def test_parent_multiple_calls(self):
        # The synthesized parent is cached: repeated access returns
        # the same object.
        _PARENT_KIND = 'KIND1'
        _PARENT_ID = 1234
        _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}]
        key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2',
                            project=self._DEFAULT_PROJECT)
        parent = key.parent
        self.assertEqual(parent.path, _PARENT_PATH)
        new_parent = key.parent
        self.assertTrue(parent is new_parent)
|
759
venv/Lib/site-packages/gcloud/datastore/test_query.py
Normal file
759
venv/Lib/site-packages/gcloud/datastore/test_query.py
Normal file
|
@ -0,0 +1,759 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class TestQuery(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.query.Query``.

    Covers constructor defaults and validation, the namespace / kind /
    ancestor / projection / order / distinct_on setters, filter handling
    (including ``__key__`` filters), and ``fetch()`` iterator wiring.
    Uses the module-level ``_Connection`` / ``_Client`` test doubles.
    """

    _PROJECT = 'PROJECT'

    def _getTargetClass(self):
        # Deferred import so an import error in the module under test
        # surfaces as a test failure rather than a collection error.
        from gcloud.datastore.query import Query
        return Query

    def _makeOne(self, *args, **kw):
        # Convenience factory for the class under test.
        return self._getTargetClass()(*args, **kw)

    def _makeClient(self, connection=None):
        # Build a fake client backed by a fake connection.
        if connection is None:
            connection = _Connection()
        return _Client(self._PROJECT, connection)

    def test_ctor_defaults(self):
        # With only a client, the query inherits project/namespace from
        # the client and all collection-valued attributes start empty.
        client = self._makeClient()
        query = self._makeOne(client)
        self.assertTrue(query._client is client)
        self.assertEqual(query.project, client.project)
        self.assertEqual(query.kind, None)
        self.assertEqual(query.namespace, client.namespace)
        self.assertEqual(query.ancestor, None)
        self.assertEqual(query.filters, [])
        self.assertEqual(query.projection, [])
        self.assertEqual(query.order, [])
        self.assertEqual(query.distinct_on, [])

    def test_ctor_explicit(self):
        # Explicit keyword arguments override the client-derived defaults.
        from gcloud.datastore.key import Key
        _PROJECT = 'OTHER_PROJECT'
        _KIND = 'KIND'
        _NAMESPACE = 'OTHER_NAMESPACE'
        client = self._makeClient()
        ancestor = Key('ANCESTOR', 123, project=_PROJECT)
        FILTERS = [('foo', '=', 'Qux'), ('bar', '<', 17)]
        PROJECTION = ['foo', 'bar', 'baz']
        ORDER = ['foo', 'bar']
        DISTINCT_ON = ['foo']
        query = self._makeOne(
            client,
            kind=_KIND,
            project=_PROJECT,
            namespace=_NAMESPACE,
            ancestor=ancestor,
            filters=FILTERS,
            projection=PROJECTION,
            order=ORDER,
            distinct_on=DISTINCT_ON,
            )
        self.assertTrue(query._client is client)
        self.assertEqual(query.project, _PROJECT)
        self.assertEqual(query.kind, _KIND)
        self.assertEqual(query.namespace, _NAMESPACE)
        self.assertEqual(query.ancestor.path, ancestor.path)
        self.assertEqual(query.filters, FILTERS)
        self.assertEqual(query.projection, PROJECTION)
        self.assertEqual(query.order, ORDER)
        self.assertEqual(query.distinct_on, DISTINCT_ON)

    def test_ctor_bad_projection(self):
        # projection must be a string or sequence of strings.
        BAD_PROJECTION = object()
        self.assertRaises(TypeError, self._makeOne, self._makeClient(),
                          projection=BAD_PROJECTION)

    def test_ctor_bad_order(self):
        # order must be a string or sequence of strings.
        BAD_ORDER = object()
        self.assertRaises(TypeError, self._makeOne, self._makeClient(),
                          order=BAD_ORDER)

    def test_ctor_bad_distinct_on(self):
        # distinct_on must be a string or sequence of strings.
        BAD_DISTINCT_ON = object()
        self.assertRaises(TypeError, self._makeOne, self._makeClient(),
                          distinct_on=BAD_DISTINCT_ON)

    def test_ctor_bad_filters(self):
        # Each filter must unpack as (property, operator, value).
        FILTERS_CANT_UNPACK = [('one', 'two')]
        self.assertRaises(ValueError, self._makeOne, self._makeClient(),
                          filters=FILTERS_CANT_UNPACK)

    def test_namespace_setter_w_non_string(self):
        query = self._makeOne(self._makeClient())

        def _assign(val):
            query.namespace = val

        self.assertRaises(ValueError, _assign, object())

    def test_namespace_setter(self):
        _NAMESPACE = 'OTHER_NAMESPACE'
        query = self._makeOne(self._makeClient())
        query.namespace = _NAMESPACE
        self.assertEqual(query.namespace, _NAMESPACE)

    def test_kind_setter_w_non_string(self):
        query = self._makeOne(self._makeClient())

        def _assign(val):
            query.kind = val

        self.assertRaises(TypeError, _assign, object())

    def test_kind_setter_wo_existing(self):
        _KIND = 'KIND'
        query = self._makeOne(self._makeClient())
        query.kind = _KIND
        self.assertEqual(query.kind, _KIND)

    def test_kind_setter_w_existing(self):
        # Reassigning kind replaces the old one; project is untouched.
        _KIND_BEFORE = 'KIND_BEFORE'
        _KIND_AFTER = 'KIND_AFTER'
        query = self._makeOne(self._makeClient(), kind=_KIND_BEFORE)
        self.assertEqual(query.kind, _KIND_BEFORE)
        query.kind = _KIND_AFTER
        self.assertEqual(query.project, self._PROJECT)
        self.assertEqual(query.kind, _KIND_AFTER)

    def test_ancestor_setter_w_non_key(self):
        query = self._makeOne(self._makeClient())

        def _assign(val):
            query.ancestor = val

        self.assertRaises(TypeError, _assign, object())
        self.assertRaises(TypeError, _assign, ['KIND', 'NAME'])

    def test_ancestor_setter_w_key(self):
        from gcloud.datastore.key import Key
        _NAME = u'NAME'
        key = Key('KIND', 123, project=self._PROJECT)
        query = self._makeOne(self._makeClient())
        query.add_filter('name', '=', _NAME)
        query.ancestor = key
        self.assertEqual(query.ancestor.path, key.path)

    def test_ancestor_deleter_w_key(self):
        # ``del query.ancestor`` clears a previously-set ancestor.
        from gcloud.datastore.key import Key
        key = Key('KIND', 123, project=self._PROJECT)
        query = self._makeOne(client=self._makeClient(), ancestor=key)
        del query.ancestor
        self.assertTrue(query.ancestor is None)

    def test_add_filter_setter_w_unknown_operator(self):
        query = self._makeOne(self._makeClient())
        self.assertRaises(ValueError, query.add_filter,
                          'firstname', '~~', 'John')

    def test_add_filter_w_known_operator(self):
        query = self._makeOne(self._makeClient())
        query.add_filter('firstname', '=', u'John')
        self.assertEqual(query.filters, [('firstname', '=', u'John')])

    def test_add_filter_w_all_operators(self):
        # Every supported comparison operator is accepted and preserved
        # in insertion order.
        query = self._makeOne(self._makeClient())
        query.add_filter('leq_prop', '<=', u'val1')
        query.add_filter('geq_prop', '>=', u'val2')
        query.add_filter('lt_prop', '<', u'val3')
        query.add_filter('gt_prop', '>', u'val4')
        query.add_filter('eq_prop', '=', u'val5')
        self.assertEqual(len(query.filters), 5)
        self.assertEqual(query.filters[0], ('leq_prop', '<=', u'val1'))
        self.assertEqual(query.filters[1], ('geq_prop', '>=', u'val2'))
        self.assertEqual(query.filters[2], ('lt_prop', '<', u'val3'))
        self.assertEqual(query.filters[3], ('gt_prop', '>', u'val4'))
        self.assertEqual(query.filters[4], ('eq_prop', '=', u'val5'))

    def test_add_filter_w_known_operator_and_entity(self):
        # An Entity is a legal filter value.
        from gcloud.datastore.entity import Entity
        query = self._makeOne(self._makeClient())
        other = Entity()
        other['firstname'] = u'John'
        other['lastname'] = u'Smith'
        query.add_filter('other', '=', other)
        self.assertEqual(query.filters, [('other', '=', other)])

    def test_add_filter_w_whitespace_property_name(self):
        # Property names are stored verbatim, whitespace included.
        query = self._makeOne(self._makeClient())
        PROPERTY_NAME = ' property with lots of space '
        query.add_filter(PROPERTY_NAME, '=', u'John')
        self.assertEqual(query.filters, [(PROPERTY_NAME, '=', u'John')])

    def test_add_filter___key__valid_key(self):
        from gcloud.datastore.key import Key
        query = self._makeOne(self._makeClient())
        key = Key('Foo', project=self._PROJECT)
        query.add_filter('__key__', '=', key)
        self.assertEqual(query.filters, [('__key__', '=', key)])

    def test_filter___key__not_equal_operator(self):
        # Operators other than '=' are also allowed on __key__.
        from gcloud.datastore.key import Key
        key = Key('Foo', project=self._PROJECT)
        query = self._makeOne(self._makeClient())
        query.add_filter('__key__', '<', key)
        self.assertEqual(query.filters, [('__key__', '<', key)])

    def test_filter___key__invalid_value(self):
        # A __key__ filter value must be a Key instance.
        query = self._makeOne(self._makeClient())
        self.assertRaises(ValueError, query.add_filter, '__key__', '=', None)

    def test_projection_setter_empty(self):
        query = self._makeOne(self._makeClient())
        query.projection = []
        self.assertEqual(query.projection, [])

    def test_projection_setter_string(self):
        # A bare string is normalized into a single-element list.
        query = self._makeOne(self._makeClient())
        query.projection = 'field1'
        self.assertEqual(query.projection, ['field1'])

    def test_projection_setter_non_empty(self):
        query = self._makeOne(self._makeClient())
        query.projection = ['field1', 'field2']
        self.assertEqual(query.projection, ['field1', 'field2'])

    def test_projection_setter_multiple_calls(self):
        # Each assignment REPLACES the projection; it does not append.
        _PROJECTION1 = ['field1', 'field2']
        _PROJECTION2 = ['field3']
        query = self._makeOne(self._makeClient())
        query.projection = _PROJECTION1
        self.assertEqual(query.projection, _PROJECTION1)
        query.projection = _PROJECTION2
        self.assertEqual(query.projection, _PROJECTION2)

    def test_keys_only(self):
        # keys_only() is sugar for projecting only '__key__'.
        query = self._makeOne(self._makeClient())
        query.keys_only()
        self.assertEqual(query.projection, ['__key__'])

    def test_key_filter_defaults(self):
        # key_filter() defaults to the '=' operator.
        from gcloud.datastore.key import Key

        client = self._makeClient()
        query = self._makeOne(client)
        self.assertEqual(query.filters, [])
        key = Key('Kind', 1234, project='project')
        query.key_filter(key)
        self.assertEqual(query.filters, [('__key__', '=', key)])

    def test_key_filter_explicit(self):
        from gcloud.datastore.key import Key

        client = self._makeClient()
        query = self._makeOne(client)
        self.assertEqual(query.filters, [])
        key = Key('Kind', 1234, project='project')
        query.key_filter(key, operator='>')
        self.assertEqual(query.filters, [('__key__', '>', key)])

    def test_order_setter_empty(self):
        query = self._makeOne(self._makeClient(), order=['foo', '-bar'])
        query.order = []
        self.assertEqual(query.order, [])

    def test_order_setter_string(self):
        query = self._makeOne(self._makeClient())
        query.order = 'field'
        self.assertEqual(query.order, ['field'])

    def test_order_setter_single_item_list_desc(self):
        # A '-' prefix (descending sort) is preserved verbatim.
        query = self._makeOne(self._makeClient())
        query.order = ['-field']
        self.assertEqual(query.order, ['-field'])

    def test_order_setter_multiple(self):
        query = self._makeOne(self._makeClient())
        query.order = ['foo', '-bar']
        self.assertEqual(query.order, ['foo', '-bar'])

    def test_distinct_on_setter_empty(self):
        query = self._makeOne(self._makeClient(), distinct_on=['foo', 'bar'])
        query.distinct_on = []
        self.assertEqual(query.distinct_on, [])

    def test_distinct_on_setter_string(self):
        query = self._makeOne(self._makeClient())
        query.distinct_on = 'field1'
        self.assertEqual(query.distinct_on, ['field1'])

    def test_distinct_on_setter_non_empty(self):
        query = self._makeOne(self._makeClient())
        query.distinct_on = ['field1', 'field2']
        self.assertEqual(query.distinct_on, ['field1', 'field2'])

    def test_distinct_on_multiple_calls(self):
        # Each assignment REPLACES distinct_on; it does not append.
        _DISTINCT_ON1 = ['field1', 'field2']
        _DISTINCT_ON2 = ['field3']
        query = self._makeOne(self._makeClient())
        query.distinct_on = _DISTINCT_ON1
        self.assertEqual(query.distinct_on, _DISTINCT_ON1)
        query.distinct_on = _DISTINCT_ON2
        self.assertEqual(query.distinct_on, _DISTINCT_ON2)

    def test_fetch_defaults_w_client_attr(self):
        # fetch() without arguments binds the query's own client and
        # leaves limit unset / offset zero.
        connection = _Connection()
        client = self._makeClient(connection)
        query = self._makeOne(client)
        iterator = query.fetch()
        self.assertTrue(iterator._query is query)
        self.assertTrue(iterator._client is client)
        self.assertEqual(iterator._limit, None)
        self.assertEqual(iterator._offset, 0)

    def test_fetch_w_explicit_client(self):
        # An explicitly-passed client overrides the query's own client.
        connection = _Connection()
        client = self._makeClient(connection)
        other_client = self._makeClient(connection)
        query = self._makeOne(client)
        iterator = query.fetch(limit=7, offset=8, client=other_client)
        self.assertTrue(iterator._query is query)
        self.assertTrue(iterator._client is other_client)
        self.assertEqual(iterator._limit, 7)
        self.assertEqual(iterator._offset, 8)
||||
|
||||
|
||||
class TestIterator(unittest2.TestCase):
|
||||
_PROJECT = 'PROJECT'
|
||||
_NAMESPACE = 'NAMESPACE'
|
||||
_KIND = 'KIND'
|
||||
_ID = 123
|
||||
_START = b'\x00'
|
||||
_END = b'\xFF'
|
||||
|
||||
def _getTargetClass(self):
|
||||
from gcloud.datastore.query import Iterator
|
||||
return Iterator
|
||||
|
||||
def _makeOne(self, *args, **kw):
|
||||
return self._getTargetClass()(*args, **kw)
|
||||
|
||||
def _addQueryResults(self, connection, cursor=_END, more=False,
|
||||
skipped_results=None, no_entity=False):
|
||||
from gcloud.datastore._generated import entity_pb2
|
||||
from gcloud.datastore._generated import query_pb2
|
||||
from gcloud.datastore.helpers import _new_value_pb
|
||||
|
||||
if more:
|
||||
more_enum = query_pb2.QueryResultBatch.NOT_FINISHED
|
||||
else:
|
||||
more_enum = query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT
|
||||
_ID = 123
|
||||
if no_entity:
|
||||
entities = []
|
||||
else:
|
||||
entity_pb = entity_pb2.Entity()
|
||||
entity_pb.key.partition_id.project_id = self._PROJECT
|
||||
path_element = entity_pb.key.path.add()
|
||||
path_element.kind = self._KIND
|
||||
path_element.id = _ID
|
||||
value_pb = _new_value_pb(entity_pb, 'foo')
|
||||
value_pb.string_value = u'Foo'
|
||||
entities = [entity_pb]
|
||||
|
||||
connection._results.append(
|
||||
(entities, cursor, more_enum, skipped_results))
|
||||
|
||||
def _makeClient(self, connection=None):
|
||||
if connection is None:
|
||||
connection = _Connection()
|
||||
return _Client(self._PROJECT, connection)
|
||||
|
||||
def test_ctor_defaults(self):
|
||||
connection = _Connection()
|
||||
query = object()
|
||||
iterator = self._makeOne(query, connection)
|
||||
self.assertTrue(iterator._query is query)
|
||||
self.assertEqual(iterator._limit, None)
|
||||
self.assertEqual(iterator._offset, None)
|
||||
self.assertEqual(iterator._skipped_results, None)
|
||||
|
||||
def test_ctor_explicit(self):
|
||||
client = self._makeClient()
|
||||
query = _Query(client)
|
||||
iterator = self._makeOne(query, client, 13, 29)
|
||||
self.assertTrue(iterator._query is query)
|
||||
self.assertEqual(iterator._limit, 13)
|
||||
self.assertEqual(iterator._offset, 29)
|
||||
|
||||
def test_next_page_no_cursors_no_more(self):
|
||||
from gcloud.datastore.query import _pb_from_query
|
||||
connection = _Connection()
|
||||
client = self._makeClient(connection)
|
||||
query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE)
|
||||
self._addQueryResults(connection, cursor=b'')
|
||||
iterator = self._makeOne(query, client)
|
||||
entities, more_results, cursor = iterator.next_page()
|
||||
self.assertEqual(iterator._skipped_results, None)
|
||||
|
||||
self.assertEqual(cursor, None)
|
||||
self.assertFalse(more_results)
|
||||
self.assertFalse(iterator._more_results)
|
||||
self.assertEqual(len(entities), 1)
|
||||
self.assertEqual(entities[0].key.path,
|
||||
[{'kind': self._KIND, 'id': self._ID}])
|
||||
self.assertEqual(entities[0]['foo'], u'Foo')
|
||||
qpb = _pb_from_query(query)
|
||||
qpb.offset = 0
|
||||
EXPECTED = {
|
||||
'project': self._PROJECT,
|
||||
'query_pb': qpb,
|
||||
'namespace': self._NAMESPACE,
|
||||
'transaction_id': None,
|
||||
}
|
||||
self.assertEqual(connection._called_with, [EXPECTED])
|
||||
|
||||
    def test_next_page_no_cursors_no_more_w_offset_and_limit(self):
        # With explicit limit/offset, the first request must embed them in
        # the query protobuf, and the server-reported skipped_results is
        # recorded on the iterator.
        from gcloud.datastore.query import _pb_from_query
        connection = _Connection()
        client = self._makeClient(connection)
        query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE)
        skipped_results = object()
        self._addQueryResults(connection, cursor=b'',
                              skipped_results=skipped_results)
        iterator = self._makeOne(query, client, 13, 29)
        entities, more_results, cursor = iterator.next_page()

        self.assertEqual(cursor, None)
        self.assertFalse(more_results)
        self.assertFalse(iterator._more_results)
        self.assertEqual(iterator._skipped_results, skipped_results)
        self.assertEqual(len(entities), 1)
        self.assertEqual(entities[0].key.path,
                         [{'kind': self._KIND, 'id': self._ID}])
        self.assertEqual(entities[0]['foo'], u'Foo')
        qpb = _pb_from_query(query)
        qpb.limit.value = 13
        qpb.offset = 29
        EXPECTED = {
            'project': self._PROJECT,
            'query_pb': qpb,
            'namespace': self._NAMESPACE,
            'transaction_id': None,
        }
        self.assertEqual(connection._called_with, [EXPECTED])
|
||||
|
||||
    def test_next_page_w_cursors_w_more(self):
        # When MORE_RESULTS is signalled, next_page() returns the new
        # cursor (base64-encoded), shifts the returned cursor into
        # _start_cursor, and clears _end_cursor for the next request.
        from base64 import urlsafe_b64decode
        from base64 import urlsafe_b64encode
        from gcloud.datastore.query import _pb_from_query
        connection = _Connection()
        client = self._makeClient(connection)
        query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE)
        self._addQueryResults(connection, cursor=self._END, more=True)
        iterator = self._makeOne(query, client)
        iterator._start_cursor = self._START
        iterator._end_cursor = self._END
        entities, more_results, cursor = iterator.next_page()

        self.assertEqual(cursor, urlsafe_b64encode(self._END))
        self.assertTrue(more_results)
        self.assertTrue(iterator._more_results)
        self.assertEqual(iterator._skipped_results, None)
        self.assertEqual(iterator._end_cursor, None)
        self.assertEqual(urlsafe_b64decode(iterator._start_cursor), self._END)
        self.assertEqual(len(entities), 1)
        self.assertEqual(entities[0].key.path,
                         [{'kind': self._KIND, 'id': self._ID}])
        self.assertEqual(entities[0]['foo'], u'Foo')
        # The outbound protobuf must carry the decoded start/end cursors.
        qpb = _pb_from_query(query)
        qpb.offset = 0
        qpb.start_cursor = urlsafe_b64decode(self._START)
        qpb.end_cursor = urlsafe_b64decode(self._END)
        EXPECTED = {
            'project': self._PROJECT,
            'query_pb': qpb,
            'namespace': self._NAMESPACE,
            'transaction_id': None,
        }
        self.assertEqual(connection._called_with, [EXPECTED])
|
||||
|
||||
    def test_next_page_w_cursors_w_bogus_more(self):
        # An unrecognized MORE_RESULTS enum value from the server must
        # surface as a ValueError rather than being silently accepted.
        connection = _Connection()
        client = self._makeClient(connection)
        query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE)
        self._addQueryResults(connection, cursor=self._END, more=True)
        epb, cursor, _, _ = connection._results.pop()
        connection._results.append((epb, cursor, 5, None))  # invalid enum
        iterator = self._makeOne(query, client)
        self.assertRaises(ValueError, iterator.next_page)
|
||||
|
||||
    def test___iter___no_more(self):
        # Iterating an exhausted-in-one-page result set performs a single
        # run_query call and yields exactly the entities of that page.
        from gcloud.datastore.query import _pb_from_query
        connection = _Connection()
        client = self._makeClient(connection)
        query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE)
        self._addQueryResults(connection)
        iterator = self._makeOne(query, client)
        entities = list(iterator)

        self.assertFalse(iterator._more_results)
        self.assertEqual(len(entities), 1)
        self.assertEqual(entities[0].key.path,
                         [{'kind': self._KIND, 'id': self._ID}])
        self.assertEqual(entities[0]['foo'], u'Foo')
        qpb = _pb_from_query(query)
        qpb.offset = 0
        EXPECTED = {
            'project': self._PROJECT,
            'query_pb': qpb,
            'namespace': self._NAMESPACE,
            'transaction_id': None,
        }
        self.assertEqual(connection._called_with, [EXPECTED])
|
||||
|
||||
    def test___iter___w_more(self):
        # Two pages: the second request must start from the cursor the
        # first page returned, and iteration concatenates both pages.
        from gcloud.datastore.query import _pb_from_query
        connection = _Connection()
        client = self._makeClient(connection)
        query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE)
        self._addQueryResults(connection, cursor=self._END, more=True)
        self._addQueryResults(connection)
        iterator = self._makeOne(query, client)
        entities = list(iterator)

        self.assertFalse(iterator._more_results)
        self.assertEqual(len(entities), 2)
        for entity in entities:
            self.assertEqual(
                entity.key.path,
                [{'kind': self._KIND, 'id': self._ID}])
        self.assertEqual(entities[1]['foo'], u'Foo')
        qpb1 = _pb_from_query(query)
        qpb2 = _pb_from_query(query)
        qpb2.start_cursor = self._END  # page 2 resumes at page 1's cursor
        EXPECTED1 = {
            'project': self._PROJECT,
            'query_pb': qpb1,
            'namespace': self._NAMESPACE,
            'transaction_id': None,
        }
        EXPECTED2 = {
            'project': self._PROJECT,
            'query_pb': qpb2,
            'namespace': self._NAMESPACE,
            'transaction_id': None,
        }
        self.assertEqual(len(connection._called_with), 2)
        self.assertEqual(connection._called_with[0], EXPECTED1)
        self.assertEqual(connection._called_with[1], EXPECTED2)
|
||||
|
||||
    def test___iter___w_limit(self):
        # With limit and offset, each follow-up request must shrink the
        # remaining offset by the skipped_results already reported, and
        # shrink the limit by the entities already yielded.
        from gcloud.datastore.query import _pb_from_query

        connection = _Connection()
        client = self._makeClient(connection)
        query = _Query(client, self._KIND, self._PROJECT, self._NAMESPACE)
        skip1 = 4
        skip2 = 9
        self._addQueryResults(connection, more=True, skipped_results=skip1,
                              no_entity=True)
        self._addQueryResults(connection, more=True, skipped_results=skip2)
        self._addQueryResults(connection)
        offset = skip1 + skip2
        iterator = self._makeOne(query, client, limit=2, offset=offset)
        entities = list(iterator)

        self.assertFalse(iterator._more_results)
        self.assertEqual(len(entities), 2)
        for entity in entities:
            self.assertEqual(
                entity.key.path,
                [{'kind': self._KIND, 'id': self._ID}])
        # Request 1: full offset, full limit.
        qpb1 = _pb_from_query(query)
        qpb1.limit.value = 2
        qpb1.offset = offset
        # Request 2: offset reduced by the rows already skipped.
        qpb2 = _pb_from_query(query)
        qpb2.start_cursor = self._END
        qpb2.limit.value = 2
        qpb2.offset = offset - skip1
        # Request 3: one entity already consumed, so limit drops to 1.
        qpb3 = _pb_from_query(query)
        qpb3.start_cursor = self._END
        qpb3.limit.value = 1
        EXPECTED1 = {
            'project': self._PROJECT,
            'query_pb': qpb1,
            'namespace': self._NAMESPACE,
            'transaction_id': None,
        }
        EXPECTED2 = {
            'project': self._PROJECT,
            'query_pb': qpb2,
            'namespace': self._NAMESPACE,
            'transaction_id': None,
        }
        EXPECTED3 = {
            'project': self._PROJECT,
            'query_pb': qpb3,
            'namespace': self._NAMESPACE,
            'transaction_id': None,
        }
        self.assertEqual(len(connection._called_with), 3)
        self.assertEqual(connection._called_with[0], EXPECTED1)
        self.assertEqual(connection._called_with[1], EXPECTED2)
        self.assertEqual(connection._called_with[2], EXPECTED3)
|
||||
|
||||
|
||||
class Test__pb_from_query(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.query._pb_from_query``."""

    def _callFUT(self, query):
        # Late import so a broken module fails the test, not collection.
        from gcloud.datastore.query import _pb_from_query
        return _pb_from_query(query)

    def test_empty(self):
        # A query with no attributes set maps to an all-defaults protobuf.
        from gcloud.datastore._generated import query_pb2

        pb = self._callFUT(_Query())
        self.assertEqual(list(pb.projection), [])
        self.assertEqual(list(pb.kind), [])
        self.assertEqual(list(pb.order), [])
        self.assertEqual(list(pb.distinct_on), [])
        self.assertEqual(pb.filter.property_filter.property.name, '')
        cfilter = pb.filter.composite_filter
        self.assertEqual(cfilter.op,
                         query_pb2.CompositeFilter.OPERATOR_UNSPECIFIED)
        self.assertEqual(list(cfilter.filters), [])
        self.assertEqual(pb.start_cursor, b'')
        self.assertEqual(pb.end_cursor, b'')
        self.assertEqual(pb.limit.value, 0)
        self.assertEqual(pb.offset, 0)

    def test_projection(self):
        pb = self._callFUT(_Query(projection=['a', 'b', 'c']))
        self.assertEqual([item.property.name for item in pb.projection],
                         ['a', 'b', 'c'])

    def test_kind(self):
        pb = self._callFUT(_Query(kind='KIND'))
        self.assertEqual([item.name for item in pb.kind], ['KIND'])

    def test_ancestor(self):
        # An ancestor becomes a HAS_ANCESTOR-style key filter on __key__,
        # wrapped in an AND composite filter.
        from gcloud.datastore.key import Key
        from gcloud.datastore._generated import query_pb2

        ancestor = Key('Ancestor', 123, project='PROJECT')
        pb = self._callFUT(_Query(ancestor=ancestor))
        cfilter = pb.filter.composite_filter
        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
        self.assertEqual(len(cfilter.filters), 1)
        pfilter = cfilter.filters[0].property_filter
        self.assertEqual(pfilter.property.name, '__key__')
        ancestor_pb = ancestor.to_protobuf()
        self.assertEqual(pfilter.value.key_value, ancestor_pb)

    def test_filter(self):
        from gcloud.datastore._generated import query_pb2

        query = _Query(filters=[('name', '=', u'John')])
        # Stub the operator map so '=' resolves without the real Query.
        query.OPERATORS = {
            '=': query_pb2.PropertyFilter.EQUAL,
        }
        pb = self._callFUT(query)
        cfilter = pb.filter.composite_filter
        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
        self.assertEqual(len(cfilter.filters), 1)
        pfilter = cfilter.filters[0].property_filter
        self.assertEqual(pfilter.property.name, 'name')
        self.assertEqual(pfilter.value.string_value, u'John')

    def test_filter_key(self):
        # Filtering on '__key__' must serialize the Key into key_value.
        from gcloud.datastore.key import Key
        from gcloud.datastore._generated import query_pb2

        key = Key('Kind', 123, project='PROJECT')
        query = _Query(filters=[('__key__', '=', key)])
        query.OPERATORS = {
            '=': query_pb2.PropertyFilter.EQUAL,
        }
        pb = self._callFUT(query)
        cfilter = pb.filter.composite_filter
        self.assertEqual(cfilter.op, query_pb2.CompositeFilter.AND)
        self.assertEqual(len(cfilter.filters), 1)
        pfilter = cfilter.filters[0].property_filter
        self.assertEqual(pfilter.property.name, '__key__')
        key_pb = key.to_protobuf()
        self.assertEqual(pfilter.value.key_value, key_pb)

    def test_order(self):
        # A leading '-' requests DESCENDING; otherwise ASCENDING.
        from gcloud.datastore._generated import query_pb2

        pb = self._callFUT(_Query(order=['a', '-b', 'c']))
        self.assertEqual([item.property.name for item in pb.order],
                         ['a', 'b', 'c'])
        self.assertEqual([item.direction for item in pb.order],
                         [query_pb2.PropertyOrder.ASCENDING,
                          query_pb2.PropertyOrder.DESCENDING,
                          query_pb2.PropertyOrder.ASCENDING])

    def test_distinct_on(self):
        pb = self._callFUT(_Query(distinct_on=['a', 'b', 'c']))
        self.assertEqual([item.name for item in pb.distinct_on],
                         ['a', 'b', 'c'])
|
||||
|
||||
|
||||
class _Query(object):
|
||||
|
||||
def __init__(self,
|
||||
client=object(),
|
||||
kind=None,
|
||||
project=None,
|
||||
namespace=None,
|
||||
ancestor=None,
|
||||
filters=(),
|
||||
projection=(),
|
||||
order=(),
|
||||
distinct_on=()):
|
||||
self._client = client
|
||||
self.kind = kind
|
||||
self.project = project
|
||||
self.namespace = namespace
|
||||
self.ancestor = ancestor
|
||||
self.filters = filters
|
||||
self.projection = projection
|
||||
self.order = order
|
||||
self.distinct_on = distinct_on
|
||||
|
||||
|
||||
class _Connection(object):
|
||||
|
||||
_called_with = None
|
||||
_cursor = b'\x00'
|
||||
_skipped = 0
|
||||
|
||||
def __init__(self):
|
||||
self._results = []
|
||||
self._called_with = []
|
||||
|
||||
def run_query(self, **kw):
|
||||
self._called_with.append(kw)
|
||||
result, self._results = self._results[0], self._results[1:]
|
||||
return result
|
||||
|
||||
|
||||
class _Client(object):
|
||||
|
||||
def __init__(self, project, connection, namespace=None):
|
||||
self.project = project
|
||||
self.connection = connection
|
||||
self.namespace = namespace
|
||||
|
||||
@property
|
||||
def current_transaction(self):
|
||||
pass
|
223
venv/Lib/site-packages/gcloud/datastore/test_transaction.py
Normal file
223
venv/Lib/site-packages/gcloud/datastore/test_transaction.py
Normal file
|
@ -0,0 +1,223 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import unittest2
|
||||
|
||||
|
||||
class TestTransaction(unittest2.TestCase):
    """Unit tests for ``gcloud.datastore.transaction.Transaction``."""

    def _getTargetClass(self):
        # Late import so a broken module fails the test, not collection.
        from gcloud.datastore.transaction import Transaction
        return Transaction

    def _makeOne(self, client, **kw):
        return self._getTargetClass()(client, **kw)

    def test_ctor_defaults(self):
        # A fresh transaction has no ID, is in the _INITIAL state, and
        # exposes the commit request's mutations list directly.
        from gcloud.datastore._generated import datastore_pb2

        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        xact = self._makeOne(client)
        self.assertEqual(xact.project, _PROJECT)
        self.assertEqual(xact.connection, connection)
        self.assertEqual(xact.id, None)
        self.assertEqual(xact._status, self._getTargetClass()._INITIAL)
        self.assertTrue(isinstance(xact._commit_request,
                                   datastore_pb2.CommitRequest))
        self.assertTrue(xact.mutations is xact._commit_request.mutations)
        self.assertEqual(len(xact._partial_key_entities), 0)

    def test_current(self):
        # current() tracks the topmost *transaction* on the batch stack;
        # a plain (non-transaction) batch on top makes it return None.
        from gcloud.datastore.test_client import _NoCommitBatch
        _PROJECT = 'PROJECT'
        connection = _Connection()
        client = _Client(_PROJECT, connection)
        xact1 = self._makeOne(client)
        xact2 = self._makeOne(client)
        self.assertTrue(xact1.current() is None)
        self.assertTrue(xact2.current() is None)
        with xact1:
            self.assertTrue(xact1.current() is xact1)
            self.assertTrue(xact2.current() is xact1)
            with _NoCommitBatch(client):
                self.assertTrue(xact1.current() is None)
                self.assertTrue(xact2.current() is None)
            with xact2:
                self.assertTrue(xact1.current() is xact2)
                self.assertTrue(xact2.current() is xact2)
                with _NoCommitBatch(client):
                    self.assertTrue(xact1.current() is None)
                    self.assertTrue(xact2.current() is None)
            self.assertTrue(xact1.current() is xact1)
            self.assertTrue(xact2.current() is xact1)
        self.assertTrue(xact1.current() is None)
        self.assertTrue(xact2.current() is None)

    def test_begin(self):
        _PROJECT = 'PROJECT'
        connection = _Connection(234)
        client = _Client(_PROJECT, connection)
        xact = self._makeOne(client)
        xact.begin()
        self.assertEqual(xact.id, 234)
        self.assertEqual(connection._begun, _PROJECT)

    def test_begin_tombstoned(self):
        # Once begun and rolled back, a transaction may not begin again.
        _PROJECT = 'PROJECT'
        connection = _Connection(234)
        client = _Client(_PROJECT, connection)
        xact = self._makeOne(client)
        xact.begin()
        self.assertEqual(xact.id, 234)
        self.assertEqual(connection._begun, _PROJECT)

        xact.rollback()
        self.assertEqual(xact.id, None)

        self.assertRaises(ValueError, xact.begin)

    def test_rollback(self):
        _PROJECT = 'PROJECT'
        connection = _Connection(234)
        client = _Client(_PROJECT, connection)
        xact = self._makeOne(client)
        xact.begin()
        xact.rollback()
        self.assertEqual(xact.id, None)
        self.assertEqual(connection._rolled_back, (_PROJECT, 234))

    def test_commit_no_partial_keys(self):
        _PROJECT = 'PROJECT'
        connection = _Connection(234)
        client = _Client(_PROJECT, connection)
        xact = self._makeOne(client)
        xact._commit_request = commit_request = object()
        xact.begin()
        xact.commit()
        self.assertEqual(connection._committed,
                         (_PROJECT, commit_request, 234))
        self.assertEqual(xact.id, None)

    def test_commit_w_partial_keys(self):
        # Entities put with partial keys get completed keys assigned
        # from the commit response.
        _PROJECT = 'PROJECT'
        _KIND = 'KIND'
        _ID = 123
        connection = _Connection(234)
        connection._completed_keys = [_make_key(_KIND, _ID, _PROJECT)]
        client = _Client(_PROJECT, connection)
        xact = self._makeOne(client)
        entity = _Entity()
        xact.put(entity)
        xact._commit_request = commit_request = object()
        xact.begin()
        xact.commit()
        self.assertEqual(connection._committed,
                         (_PROJECT, commit_request, 234))
        self.assertEqual(xact.id, None)
        self.assertEqual(entity.key.path, [{'kind': _KIND, 'id': _ID}])

    def test_context_manager_no_raise(self):
        # Entering begins the transaction; clean exit commits it.
        _PROJECT = 'PROJECT'
        connection = _Connection(234)
        client = _Client(_PROJECT, connection)
        xact = self._makeOne(client)
        xact._commit_request = commit_request = object()
        with xact:
            self.assertEqual(xact.id, 234)
            self.assertEqual(connection._begun, _PROJECT)
        self.assertEqual(connection._committed,
                         (_PROJECT, commit_request, 234))
        self.assertEqual(xact.id, None)

    def test_context_manager_w_raise(self):
        # An exception inside the block rolls the transaction back and
        # never commits.

        class Foo(Exception):
            pass

        _PROJECT = 'PROJECT'
        connection = _Connection(234)
        client = _Client(_PROJECT, connection)
        xact = self._makeOne(client)
        xact._mutation = object()
        try:
            with xact:
                self.assertEqual(xact.id, 234)
                self.assertEqual(connection._begun, _PROJECT)
                raise Foo()
        except Foo:
            self.assertEqual(xact.id, None)
            self.assertEqual(connection._rolled_back, (_PROJECT, 234))
        self.assertEqual(connection._committed, None)
        self.assertEqual(xact.id, None)
|
||||
|
||||
|
||||
def _make_key(kind, id_, project):
    """Build a completed ``entity_pb2.Key`` for *project* / *kind* / *id_*."""
    from gcloud.datastore._generated import entity_pb2

    key = entity_pb2.Key()
    key.partition_id.project_id = project
    # Single path element: the key has no ancestors.
    elem = key.path.add()
    elem.kind = kind
    elem.id = id_
    return key
|
||||
|
||||
|
||||
class _Connection(object):
|
||||
_marker = object()
|
||||
_begun = _rolled_back = _committed = None
|
||||
|
||||
def __init__(self, xact_id=123):
|
||||
self._xact_id = xact_id
|
||||
self._completed_keys = []
|
||||
self._index_updates = 0
|
||||
|
||||
def begin_transaction(self, project):
|
||||
self._begun = project
|
||||
return self._xact_id
|
||||
|
||||
def rollback(self, project, transaction_id):
|
||||
self._rolled_back = project, transaction_id
|
||||
|
||||
def commit(self, project, commit_request, transaction_id):
|
||||
self._committed = (project, commit_request, transaction_id)
|
||||
return self._index_updates, self._completed_keys
|
||||
|
||||
|
||||
class _Entity(dict):
    # Minimal entity stand-in: a plain dict carrying a partial
    # (ID-less) Key, so commit() has a key to complete.

    def __init__(self):
        super(_Entity, self).__init__()
        from gcloud.datastore.key import Key
        self.key = Key('KIND', project='PROJECT')
|
||||
|
||||
|
||||
class _Client(object):
|
||||
|
||||
def __init__(self, project, connection, namespace=None):
|
||||
self.project = project
|
||||
self.connection = connection
|
||||
self.namespace = namespace
|
||||
self._batches = []
|
||||
|
||||
def _push_batch(self, batch):
|
||||
self._batches.insert(0, batch)
|
||||
|
||||
def _pop_batch(self):
|
||||
return self._batches.pop(0)
|
||||
|
||||
@property
|
||||
def current_batch(self):
|
||||
return self._batches and self._batches[0] or None
|
162
venv/Lib/site-packages/gcloud/datastore/transaction.py
Normal file
162
venv/Lib/site-packages/gcloud/datastore/transaction.py
Normal file
|
@ -0,0 +1,162 @@
|
|||
# Copyright 2014 Google Inc. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Create / interact with gcloud datastore transactions."""
|
||||
|
||||
from gcloud.datastore.batch import Batch
|
||||
|
||||
|
||||
class Transaction(Batch):
    """An abstraction representing datastore Transactions.

    Transactions can be used to build up a bulk mutation and ensure all
    or none succeed (transactionally).

    For example, the following snippet of code will put the two ``save``
    operations (either ``insert`` or ``upsert``) into the same
    mutation, and execute those within a transaction::

      >>> from gcloud import datastore
      >>> client = datastore.Client()
      >>> with client.transaction():
      ...     client.put_multi([entity1, entity2])

    Because it derives from :class:`Batch <.datastore.batch.Batch>`,
    :class:`Transaction` also provides :meth:`put` and :meth:`delete` methods::

      >>> with client.transaction() as xact:
      ...     xact.put(entity1)
      ...     xact.delete(entity2.key)

    By default, the transaction is rolled back if the transaction block
    exits with an error::

      >>> with client.transaction():
      ...     do_some_work()
      ...     raise SomeException()  # rolls back

    If the transaction block exists without an exception, it will commit
    by default.

    .. warning:: Inside a transaction, automatically assigned IDs for
       entities will not be available at save time!  That means, if you
       try::

         >>> with client.transaction():
         ...     entity = datastore.Entity(key=client.key('Thing'))
         ...     client.put(entity)

       ``entity`` won't have a complete key until the transaction is
       committed.

       Once you exit the transaction (or call :meth:`commit`), the
       automatically generated ID will be assigned to the entity::

         >>> with client.transaction():
         ...     entity = datastore.Entity(key=client.key('Thing'))
         ...     client.put(entity)
         ...     print(entity.key.is_partial)  # There is no ID on this key.
         ...
         True
         >>> print(entity.key.is_partial)  # There *is* an ID.
         False

    If you don't want to use the context manager you can initialize a
    transaction manually::

      >>> transaction = client.transaction()
      >>> transaction.begin()
      >>>
      >>> entity = datastore.Entity(key=client.key('Thing'))
      >>> transaction.put(entity)
      >>>
      >>> if error:
      ...     transaction.rollback()
      ... else:
      ...     transaction.commit()

    :type client: :class:`gcloud.datastore.client.Client`
    :param client: the client used to connect to datastore.
    """

    def __init__(self, client):
        super(Transaction, self).__init__(client)
        # Transaction ID assigned by the backend in begin(); None while
        # the transaction is inactive or tombstoned.
        self._id = None

    @property
    def id(self):
        """Getter for the transaction ID.

        :rtype: string
        :returns: The ID of the current transaction.
        """
        return self._id

    def current(self):
        """Return the topmost transaction.

        .. note::

            If the topmost element on the stack is not a transaction,
            returns None.

        :rtype: :class:`gcloud.datastore.transaction.Transaction` or None
        """
        top = super(Transaction, self).current()
        # Implicitly returns None when the topmost batch is not a
        # Transaction (e.g. a plain Batch).
        if isinstance(top, Transaction):
            return top

    def begin(self):
        """Begins a transaction.

        This method is called automatically when entering a with
        statement, however it can be called explicitly if you don't want
        to use a context manager.

        :raises: :class:`ValueError` if the transaction has already begun.
        """
        super(Transaction, self).begin()
        self._id = self.connection.begin_transaction(self.project)

    def rollback(self):
        """Rolls back the current transaction.

        This method has necessary side-effects:

        - Sets the current connection's transaction reference to None.
        - Sets the current transaction's ID to None.
        """
        try:
            self.connection.rollback(self.project, self._id)
        finally:
            # Mark the batch rolled back even if the RPC above raised.
            super(Transaction, self).rollback()
            # Clear our own ID in case this gets accidentally reused.
            self._id = None

    def commit(self):
        """Commits the transaction.

        This is called automatically upon exiting a with statement,
        however it can be called explicitly if you don't want to use a
        context manager.

        This method has necessary side-effects:

        - Sets the current transaction's ID to None.
        """
        try:
            super(Transaction, self).commit()
        finally:
            # Clear our own ID in case this gets accidentally reused.
            self._id = None
|
Loading…
Add table
Add a link
Reference in a new issue