diff --git a/.travis.yml b/.travis.yml
index 4e202b273d71..a1a2f9408d63 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -12,6 +12,7 @@ install:
script:
- tox -e py26
- tox -e py27
+ - tox -e py34
- tox -e lint
- tox -e regression
- scripts/merge.sh
diff --git a/_gcloud_vendor/apitools/base/py/http_wrapper.py b/_gcloud_vendor/apitools/base/py/http_wrapper.py
index aa47fbc65602..8b8b6cfc08aa 100644
--- a/_gcloud_vendor/apitools/base/py/http_wrapper.py
+++ b/_gcloud_vendor/apitools/base/py/http_wrapper.py
@@ -9,11 +9,11 @@
import logging
import socket
import time
-import urlparse
import httplib2
from six.moves import http_client
from six.moves import range
+from six.moves.urllib.parse import urlsplit
from _gcloud_vendor.apitools.base.py import exceptions
from _gcloud_vendor.apitools.base.py import util
@@ -127,7 +127,7 @@ def MakeRequest(http, http_request, retries=5, redirections=5):
# wants control over the underlying connection for managing callbacks
# or hash digestion.
if getattr(http, 'connections', None):
- url_scheme = urlparse.urlsplit(http_request.url).scheme
+ url_scheme = urlsplit(http_request.url).scheme
if url_scheme and url_scheme in http.connections:
connection_type = http.connections[url_scheme]
for retry in range(retries + 1):
diff --git a/_gcloud_vendor/apitools/base/py/transfer.py b/_gcloud_vendor/apitools/base/py/transfer.py
index 1b55651ab1aa..c98d5798b5eb 100644
--- a/_gcloud_vendor/apitools/base/py/transfer.py
+++ b/_gcloud_vendor/apitools/base/py/transfer.py
@@ -9,7 +9,7 @@
+import io
import json
import mimetypes
import os
-import StringIO
import threading
from six.moves import http_client
@@ -568,7 +568,7 @@ def __ConfigureMultipartRequest(self, http_request):
# encode the body: note that we can't use `as_string`, because
# it plays games with `From ` lines.
- fp = StringIO.StringIO()
+ fp = io.StringIO()
g = email_generator.Generator(fp, mangle_from_=False)
g.flatten(msg_root, unixfrom=False)
http_request.body = fp.getvalue()
diff --git a/_gcloud_vendor/apitools/base/py/util.py b/_gcloud_vendor/apitools/base/py/util.py
index c2444c4e1948..3c3fff53768b 100644
--- a/_gcloud_vendor/apitools/base/py/util.py
+++ b/_gcloud_vendor/apitools/base/py/util.py
@@ -4,11 +4,12 @@
import collections
import os
import random
-import urllib
-import urllib2
import six
from six.moves import http_client
+from six.moves.urllib.error import URLError
+from six.moves.urllib.parse import quote
+from six.moves.urllib.request import urlopen
from _gcloud_vendor.apitools.base.py import exceptions
@@ -44,8 +45,8 @@ def DetectGce():
True iff we're running on a GCE instance.
"""
try:
- o = urllib2.urlopen('http://metadata.google.internal')
- except urllib2.URLError:
+ o = urlopen('http://metadata.google.internal')
+ except URLError:
return False
return (o.getcode() == http_client.OK and
o.headers.get('metadata-flavor') == 'Google')
@@ -103,7 +104,7 @@ def ExpandRelativePath(method_config, params, relative_path=None):
if not isinstance(value, six.string_types):
value = str(value)
path = path.replace(param_template,
- urllib.quote(value.encode('utf_8'), reserved_chars))
+ quote(value.encode('utf_8'), reserved_chars))
except TypeError as e:
raise exceptions.InvalidUserInputError(
'Error setting required parameter %s to value %s: %s' % (
diff --git a/gcloud/credentials.py b/gcloud/credentials.py
index 16b5b159b4e9..582537f324cc 100644
--- a/gcloud/credentials.py
+++ b/gcloud/credentials.py
@@ -17,8 +17,8 @@
import base64
import calendar
import datetime
-import urllib
import six
+from six.moves.urllib.parse import urlencode # pylint: disable=F0401
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
@@ -260,4 +260,4 @@ def generate_signed_url(credentials, resource, expiration,
# Return the built URL.
return '{endpoint}{resource}?{querystring}'.format(
endpoint=api_access_endpoint, resource=resource,
- querystring=urllib.urlencode(query_params))
+ querystring=urlencode(query_params))
diff --git a/gcloud/datastore/_datastore_v1.proto b/gcloud/datastore/_datastore_v1.proto
new file mode 100644
index 000000000000..3d562709ddef
--- /dev/null
+++ b/gcloud/datastore/_datastore_v1.proto
@@ -0,0 +1,594 @@
+// Copyright 2013 Google Inc. All Rights Reserved.
+//
+// The datastore v1 service proto definitions
+
+syntax = "proto2";
+
+package api.services.datastore;
+option java_package = "com.google.api.services.datastore";
+
+
+// An identifier for a particular subset of entities.
+//
+// Entities are partitioned into various subsets, each used by different
+// datasets and different namespaces within a dataset and so forth.
+//
+// All input partition IDs are normalized before use.
+// A partition ID is normalized as follows:
+// If the partition ID is unset or is set to an empty partition ID, replace it
+// with the context partition ID.
+// Otherwise, if the partition ID has no dataset ID, assign it the context
+// partition ID's dataset ID.
+// Unless otherwise documented, the context partition ID has the dataset ID set
+// to the context dataset ID and no other partition dimension set.
+//
+// A partition ID is empty if all of its fields are unset.
+//
+// Partition dimension:
+// A dimension may be unset.
+// A dimension's value must never be "".
+// A dimension's value must match [A-Za-z\d\.\-_]{1,100}
+// If the value of any dimension matches regex "__.*__",
+// the partition is reserved/read-only.
+// A reserved/read-only partition ID is forbidden in certain documented contexts.
+//
+// Dataset ID:
+// A dataset id's value must never be "".
+// A dataset id's value must match
+// ([a-z\d\-]{1,100}~)?([a-z\d][a-z\d\-\.]{0,99}:)?([a-z\d][a-z\d\-]{0,99})
+message PartitionId {
+ // The dataset ID.
+ optional string dataset_id = 3;
+ // The namespace.
+ optional string namespace = 4;
+}
+
+// A unique identifier for an entity.
+// If a key's partition id or any of its path kinds or names are
+// reserved/read-only, the key is reserved/read-only.
+// A reserved/read-only key is forbidden in certain documented contexts.
+message Key {
+ // Entities are partitioned into subsets, currently identified by a dataset
+ // (usually implicitly specified by the project) and namespace ID.
+ // Queries are scoped to a single partition.
+ optional PartitionId partition_id = 1;
+
+ // A (kind, ID/name) pair used to construct a key path.
+ //
+ // At most one of name or ID may be set.
+ // If either is set, the element is complete.
+ // If neither is set, the element is incomplete.
+ message PathElement {
+ // The kind of the entity.
+ // A kind matching regex "__.*__" is reserved/read-only.
+ // A kind must not contain more than 500 characters.
+ // Cannot be "".
+ required string kind = 1;
+ // The ID of the entity.
+ // Never equal to zero. Values less than zero are discouraged and will not
+ // be supported in the future.
+ optional int64 id = 2;
+ // The name of the entity.
+ // A name matching regex "__.*__" is reserved/read-only.
+ // A name must not be more than 500 characters.
+ // Cannot be "".
+ optional string name = 3;
+ }
+
+ // The entity path.
+ // An entity path consists of one or more elements composed of a kind and a
+ // string or numerical identifier, which identify entities. The first
+ // element identifies a root entity, the second element identifies
+ // a child of the root entity, the third element a child of the
+ // second entity, and so forth. The entities identified by all prefixes of
+ // the path are called the element's ancestors.
+ // An entity path is always fully complete: ALL of the entity's ancestors
+ // are required to be in the path along with the entity identifier itself.
+ // The only exception is that in some documented cases, the identifier in the
+ // last path element (for the entity) itself may be omitted. A path can never
+ // be empty.
+ repeated PathElement path_element = 2;
+}
+
+// A message that can hold any of the supported value types and associated
+// metadata.
+//
+// At most one of the Value fields may be set.
+// If none are set the value is "null".
+//
+message Value {
+ // A boolean value.
+ optional bool boolean_value = 1;
+ // An integer value.
+ optional int64 integer_value = 2;
+ // A double value.
+ optional double double_value = 3;
+ // A timestamp value.
+ optional int64 timestamp_microseconds_value = 4;
+ // A key value.
+ optional Key key_value = 5;
+ // A blob key value.
+ optional string blob_key_value = 16;
+ // A UTF-8 encoded string value.
+ optional string string_value = 17;
+ // A blob value.
+ optional bytes blob_value = 18;
+ // An entity value.
+ // May have no key.
+ // May have a key with an incomplete key path.
+ // May have a reserved/read-only key.
+ optional Entity entity_value = 6;
+ // A list value.
+ // Cannot contain another list value.
+ // Cannot also have a meaning and indexing set.
+ repeated Value list_value = 7;
+
+ // The meaning field is reserved and should not be used.
+ optional int32 meaning = 14;
+
+ // If the value should be indexed.
+ //
+ // The indexed property may be set for a
+ // null value.
+ // When indexed is true, stringValue
+ // is limited to 500 characters and the blob value is limited to 500 bytes.
+ // Exception: If meaning is set to 2, string_value is limited to 2038
+ // characters regardless of indexed.
+ // When indexed is true, meaning 15 and 22 are not allowed, and meaning 16
+ // will be ignored on input (and will never be set on output).
+ // Input values by default have indexed set to
+ // true; however, you can explicitly set indexed to
+ // true if you want. (An output value never has
+ // indexed explicitly set to true.) If a value is
+ // itself an entity, it cannot have indexed set to
+ // true.
+ // Exception: An entity value with meaning 9, 20 or 21 may be indexed.
+ optional bool indexed = 15 [default = true];
+}
+
+// An entity property.
+message Property {
+ // The name of the property.
+ // A property name matching regex "__.*__" is reserved.
+ // A reserved property name is forbidden in certain documented contexts.
+ // The name must not contain more than 500 characters.
+ // Cannot be "".
+ required string name = 1;
+
+ // The value(s) of the property.
+ // Each value can have only one value property populated. For example,
+ // you cannot have a values list of { value: { integerValue: 22,
+ // stringValue: "a" } }, but you can have { value: { listValue:
+ // [ { integerValue: 22 }, { stringValue: "a" } ] }.
+ required Value value = 4;
+}
+
+// An entity.
+//
+// An entity is limited to 1 megabyte when stored. That roughly
+// corresponds to a limit of 1 megabyte for the serialized form of this
+// message.
+message Entity {
+ // The entity's key.
+ //
+ // An entity must have a key, unless otherwise documented (for example,
+ // an entity in Value.entityValue may have no key).
+ // An entity's kind is its key's path's last element's kind,
+ // or null if it has no key.
+ optional Key key = 1;
+ // The entity's properties.
+ // Each property's name must be unique for its entity.
+ repeated Property property = 2;
+}
+
+// The result of fetching an entity from the datastore.
+message EntityResult {
+ // Specifies what data the 'entity' field contains.
+ // A ResultType is either implied (for example, in LookupResponse.found it
+ // is always FULL) or specified by context (for example, in message
+ // QueryResultBatch, field 'entity_result_type' specifies a ResultType
+ // for all the values in field 'entity_result').
+ enum ResultType {
+ FULL = 1; // The entire entity.
+ PROJECTION = 2; // A projected subset of properties.
+ // The entity may have no key.
+ // A property value may have meaning 18.
+ KEY_ONLY = 3; // Only the key.
+ }
+
+ // The resulting entity.
+ required Entity entity = 1;
+}
+
+// A query.
+message Query {
+ // The projection to return. If not set the entire entity is returned.
+ repeated PropertyExpression projection = 2;
+
+ // The kinds to query (if empty, returns entities from all kinds).
+ repeated KindExpression kind = 3;
+
+ // The filter to apply (optional).
+ optional Filter filter = 4;
+
+ // The order to apply to the query results (if empty, order is unspecified).
+ repeated PropertyOrder order = 5;
+
+ // The properties to group by (if empty, no grouping is applied to the
+ // result set).
+ repeated PropertyReference group_by = 6;
+
+ // A starting point for the query results. Optional. Query cursors are
+ // returned in query result batches.
+ optional bytes /* serialized QueryCursor */ start_cursor = 7;
+
+ // An ending point for the query results. Optional. Query cursors are
+ // returned in query result batches.
+ optional bytes /* serialized QueryCursor */ end_cursor = 8;
+
+ // The number of results to skip. Applies before limit, but after all other
+ // constraints (optional, defaults to 0).
+ optional int32 offset = 10 [default=0];
+
+ // The maximum number of results to return. Applies after all other
+ // constraints. Optional.
+ optional int32 limit = 11;
+}
+
+// A representation of a kind.
+message KindExpression {
+ // The name of the kind.
+ required string name = 1;
+}
+
+// A reference to a property relative to the kind expressions.
+// Matches the property name exactly.
+message PropertyReference {
+ // The name of the property.
+ required string name = 2;
+}
+
+// A representation of a property in a projection.
+message PropertyExpression {
+ enum AggregationFunction {
+ FIRST = 1;
+ }
+ // The property to project.
+ required PropertyReference property = 1;
+ // The aggregation function to apply to the property. Optional.
+ // Can only be used when grouping by at least one property. Must
+ // then be set on all properties in the projection that are not
+ // being grouped by.
+ optional AggregationFunction aggregation_function = 2;
+}
+
+// The desired order for a specific property.
+message PropertyOrder {
+ enum Direction {
+ ASCENDING = 1;
+ DESCENDING = 2;
+ }
+ // The property to order by.
+ required PropertyReference property = 1;
+ // The direction to order by.
+ optional Direction direction = 2 [default=ASCENDING];
+}
+
+// A holder for any type of filter. Exactly one field should be specified.
+message Filter {
+ // A composite filter.
+ optional CompositeFilter composite_filter = 1;
+ // A filter on a property.
+ optional PropertyFilter property_filter = 2;
+}
+
+// A filter that merges the multiple other filters using the given operation.
+message CompositeFilter {
+ enum Operator {
+ AND = 1;
+ }
+
+ // The operator for combining multiple filters.
+ required Operator operator = 1;
+ // The list of filters to combine.
+ // Must contain at least one filter.
+ repeated Filter filter = 2;
+}
+
+// A filter on a specific property.
+message PropertyFilter {
+ enum Operator {
+ LESS_THAN = 1;
+ LESS_THAN_OR_EQUAL = 2;
+ GREATER_THAN = 3;
+ GREATER_THAN_OR_EQUAL = 4;
+ EQUAL = 5;
+
+ HAS_ANCESTOR = 11;
+ }
+
+ // The property to filter by.
+ required PropertyReference property = 1;
+ // The operator to filter by.
+ required Operator operator = 2;
+ // The value to compare the property to.
+ required Value value = 3;
+}
+
+// A GQL query.
+message GqlQuery {
+ required string query_string = 1;
+ // When false, the query string must not contain a literal.
+ optional bool allow_literal = 2 [default = false];
+ // A named argument must set field GqlQueryArg.name.
+ // No two named arguments may have the same name.
+ // For each non-reserved named binding site in the query string,
+ // there must be a named argument with that name,
+ // but not necessarily the inverse.
+ repeated GqlQueryArg name_arg = 3;
+ // Numbered binding site @1 references the first numbered argument,
+ // effectively using 1-based indexing, rather than the usual 0.
+ // A numbered argument must NOT set field GqlQueryArg.name.
+ // For each binding site numbered i in query_string,
+ // there must be an ith numbered argument.
+ // The inverse must also be true.
+ repeated GqlQueryArg number_arg = 4;
+}
+
+// A binding argument for a GQL query.
+// Exactly one of fields value and cursor must be set.
+message GqlQueryArg {
+ // Must match regex "[A-Za-z_$][A-Za-z_$0-9]*".
+ // Must not match regex "__.*__".
+ // Must not be "".
+ optional string name = 1;
+ optional Value value = 2;
+ optional bytes cursor = 3;
+}
+
+// A batch of results produced by a query.
+message QueryResultBatch {
+ // The possible values for the 'more_results' field.
+ enum MoreResultsType {
+ NOT_FINISHED = 1; // There are additional batches to fetch from this query.
+ MORE_RESULTS_AFTER_LIMIT = 2; // The query is finished, but there are more
+ // results after the limit.
+ NO_MORE_RESULTS = 3; // The query has been exhausted.
+ }
+
+ // The result type for every entity in entityResults.
+ required EntityResult.ResultType entity_result_type = 1;
+ // The results for this batch.
+ repeated EntityResult entity_result = 2;
+
+ // A cursor that points to the position after the last result in the batch.
+ // May be absent.
+ optional bytes /* serialized QueryCursor */ end_cursor = 4;
+
+ // The state of the query after the current batch.
+ required MoreResultsType more_results = 5;
+
+ // The number of results skipped because of Query.offset.
+ optional int32 skipped_results = 6;
+}
+
+// A set of changes to apply.
+//
+// No entity in this message may have a reserved property name,
+// not even a property in an entity in a value.
+// No value in this message may have meaning 18,
+// not even a value in an entity in another value.
+//
+// If entities with duplicate keys are present, an arbitrary choice will
+// be made as to which is written.
+message Mutation {
+ // Entities to upsert.
+ // Each upserted entity's key must have a complete path and
+ // must not be reserved/read-only.
+ repeated Entity upsert = 1;
+ // Entities to update.
+ // Each updated entity's key must have a complete path and
+ // must not be reserved/read-only.
+ repeated Entity update = 2;
+ // Entities to insert.
+ // Each inserted entity's key must have a complete path and
+ // must not be reserved/read-only.
+ repeated Entity insert = 3;
+ // Insert entities with a newly allocated ID.
+ // Each inserted entity's key must omit the final identifier in its path and
+ // must not be reserved/read-only.
+ repeated Entity insert_auto_id = 4;
+ // Keys of entities to delete.
+ // Each key must have a complete key path and must not be reserved/read-only.
+ repeated Key delete = 5;
+ // Ignore a user specified read-only period. Optional.
+ optional bool force = 6;
+}
+
+// The result of applying a mutation.
+message MutationResult {
+ // Number of index writes.
+ required int32 index_updates = 1;
+ // Keys for insertAutoId entities. One per entity from the
+ // request, in the same order.
+ repeated Key insert_auto_id_key = 2;
+}
+
+// Options shared by read requests.
+message ReadOptions {
+ enum ReadConsistency {
+ DEFAULT = 0;
+ STRONG = 1;
+ EVENTUAL = 2;
+ }
+
+ // The read consistency to use.
+ // Cannot be set when transaction is set.
+ // Lookup and ancestor queries default to STRONG, global queries default to
+ // EVENTUAL and cannot be set to STRONG.
+ optional ReadConsistency read_consistency = 1 [default=DEFAULT];
+
+ // The transaction to use. Optional.
+ optional bytes /* serialized Transaction */ transaction = 2;
+}
+
+// The request for Lookup.
+message LookupRequest {
+
+ // Options for this lookup request. Optional.
+ optional ReadOptions read_options = 1;
+ // Keys of entities to look up from the datastore.
+ repeated Key key = 3;
+}
+
+// The response for Lookup.
+message LookupResponse {
+
+ // The order of results in these fields is undefined and has no relation to
+ // the order of the keys in the input.
+
+ // Entities found as ResultType.FULL entities.
+ repeated EntityResult found = 1;
+
+ // Entities not found as ResultType.KEY_ONLY entities.
+ repeated EntityResult missing = 2;
+
+ // A list of keys that were not looked up due to resource constraints.
+ repeated Key deferred = 3;
+}
+
+
+// The request for RunQuery.
+message RunQueryRequest {
+
+ // The options for this query.
+ optional ReadOptions read_options = 1;
+
+ // Entities are partitioned into subsets, identified by a dataset (usually
+ // implicitly specified by the project) and namespace ID. Queries are scoped
+ // to a single partition.
+ // This partition ID is normalized with the standard default context
+ // partition ID, but all other partition IDs in RunQueryRequest are
+ // normalized with this partition ID as the context partition ID.
+ optional PartitionId partition_id = 2;
+
+ // The query to run.
+ // Either this field or field gql_query must be set, but not both.
+ optional Query query = 3;
+ // The GQL query to run.
+ // Either this field or field query must be set, but not both.
+ optional GqlQuery gql_query = 7;
+}
+
+// The response for RunQuery.
+message RunQueryResponse {
+
+ // A batch of query results (always present).
+ optional QueryResultBatch batch = 1;
+
+}
+
+// The request for BeginTransaction.
+message BeginTransactionRequest {
+
+ enum IsolationLevel {
+ SNAPSHOT = 0; // Read from a consistent snapshot. Concurrent transactions
+ // conflict if their mutations conflict. For example:
+ // Read(A),Write(B) may not conflict with Read(B),Write(A),
+ // but Read(B),Write(B) does conflict with Read(B),Write(B).
+ SERIALIZABLE = 1; // Read from a consistent snapshot. Concurrent
+ // transactions conflict if they cannot be serialized.
+ // For example Read(A),Write(B) does conflict with
+ // Read(B),Write(A) but Read(A) may not conflict with
+ // Write(A).
+ }
+
+ // The transaction isolation level.
+ optional IsolationLevel isolation_level = 1 [default=SNAPSHOT];
+}
+
+// The response for BeginTransaction.
+message BeginTransactionResponse {
+
+ // The transaction identifier (always present).
+ optional bytes /* serialized Transaction */ transaction = 1;
+}
+
+// The request for Rollback.
+message RollbackRequest {
+
+ // The transaction identifier, returned by a call to
+ // beginTransaction.
+ required bytes /* serialized Transaction */ transaction = 1;
+}
+
+// The response for Rollback.
+message RollbackResponse {
+// Empty
+}
+
+// The request for Commit.
+message CommitRequest {
+
+ enum Mode {
+ TRANSACTIONAL = 1;
+ NON_TRANSACTIONAL = 2;
+ }
+
+ // The transaction identifier, returned by a call to
+ // beginTransaction. Must be set when mode is TRANSACTIONAL.
+ optional bytes /* serialized Transaction */ transaction = 1;
+ // The mutation to perform. Optional.
+ optional Mutation mutation = 2;
+ // The type of commit to perform. Either TRANSACTIONAL or NON_TRANSACTIONAL.
+ optional Mode mode = 5 [default=TRANSACTIONAL];
+}
+
+// The response for Commit.
+message CommitResponse {
+
+ // The result of performing the mutation (if any).
+ optional MutationResult mutation_result = 1;
+}
+
+// The request for AllocateIds.
+message AllocateIdsRequest {
+
+ // A list of keys with incomplete key paths to allocate IDs for.
+ // No key may be reserved/read-only.
+ repeated Key key = 1;
+}
+
+// The response for AllocateIds.
+message AllocateIdsResponse {
+
+ // The keys specified in the request (in the same order), each with
+ // its key path completed with a newly allocated ID.
+ repeated Key key = 1;
+}
+
+// Each rpc normalizes the partition IDs of the keys in its input entities,
+// and always returns entities with keys with normalized partition IDs.
+// (Note that applies to all entities, including entities in values.)
+service DatastoreService {
+ // Look up some entities by key.
+ rpc Lookup(LookupRequest) returns (LookupResponse) {
+ };
+ // Query for entities.
+ rpc RunQuery(RunQueryRequest) returns (RunQueryResponse) {
+ };
+ // Begin a new transaction.
+ rpc BeginTransaction(BeginTransactionRequest) returns (BeginTransactionResponse) {
+ };
+ // Commit a transaction, optionally creating, deleting or modifying some
+ // entities.
+ rpc Commit(CommitRequest) returns (CommitResponse) {
+ };
+ // Roll back a transaction.
+ rpc Rollback(RollbackRequest) returns (RollbackResponse) {
+ };
+ // Allocate IDs for incomplete keys (useful for referencing an entity before
+ // it is inserted).
+ rpc AllocateIds(AllocateIdsRequest) returns (AllocateIdsResponse) {
+ };
+}
diff --git a/gcloud/datastore/_datastore_v1_pb2.py b/gcloud/datastore/_datastore_v1_pb2.py
index e0023a7153a6..e31b4baec791 100644
--- a/gcloud/datastore/_datastore_v1_pb2.py
+++ b/gcloud/datastore/_datastore_v1_pb2.py
@@ -1,33 +1,26 @@
-# Copyright 2014 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: datastore_v1.proto
+# source: _datastore_v1.proto
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
+_sym_db = _symbol_database.Default()
+
DESCRIPTOR = _descriptor.FileDescriptor(
- name='datastore_v1.proto',
+ name='_datastore_v1.proto',
package='api.services.datastore',
- serialized_pb='\n\x12\x64\x61tastore_v1.proto\x12\x16\x61pi.services.datastore\"4\n\x0bPartitionId\x12\x12\n\ndataset_id\x18\x03 \x01(\t\x12\x11\n\tnamespace\x18\x04 \x01(\t\"\xb6\x01\n\x03Key\x12\x39\n\x0cpartition_id\x18\x01 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12=\n\x0cpath_element\x18\x02 \x03(\x0b\x32\'.api.services.datastore.Key.PathElement\x1a\x35\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x02(\t\x12\n\n\x02id\x18\x02 \x01(\x03\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xf4\x02\n\x05Value\x12\x15\n\rboolean_value\x18\x01 \x01(\x08\x12\x15\n\rinteger_value\x18\x02 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x03 \x01(\x01\x12$\n\x1ctimestamp_microseconds_value\x18\x04 \x01(\x03\x12.\n\tkey_value\x18\x05 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x16\n\x0e\x62lob_key_value\x18\x10 \x01(\t\x12\x14\n\x0cstring_value\x18\x11 \x01(\t\x12\x12\n\nblob_value\x18\x12 \x01(\x0c\x12\x34\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1e.api.services.datastore.Entity\x12\x31\n\nlist_value\x18\x07 \x03(\x0b\x32\x1d.api.services.datastore.Value\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x15\n\x07indexed\x18\x0f \x01(\x08:\x04true\"F\n\x08Property\x12\x0c\n\x04name\x18\x01 \x02(\t\x12,\n\x05value\x18\x04 \x02(\x0b\x32\x1d.api.services.datastore.Value\"f\n\x06\x45ntity\x12(\n\x03key\x18\x01 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x32\n\x08property\x18\x02 \x03(\x0b\x32 .api.services.datastore.Property\"t\n\x0c\x45ntityResult\x12.\n\x06\x65ntity\x18\x01 \x02(\x0b\x32\x1e.api.services.datastore.Entity\"4\n\nResultType\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\xec\x02\n\x05Query\x12>\n\nprojection\x18\x02 \x03(\x0b\x32*.api.services.datastore.PropertyExpression\x12\x34\n\x04kind\x18\x03 \x03(\x0b\x32&.api.services.datastore.KindExpression\x12.\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x1e.api.services.datastore.Filter\x12\x34\n\x05order\x18\x05 
\x03(\x0b\x32%.api.services.datastore.PropertyOrder\x12;\n\x08group_by\x18\x06 \x03(\x0b\x32).api.services.datastore.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x11\n\x06offset\x18\n \x01(\x05:\x01\x30\x12\r\n\x05limit\x18\x0b \x01(\x05\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x02(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x02(\t\"\xd1\x01\n\x12PropertyExpression\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\\\n\x14\x61ggregation_function\x18\x02 \x01(\x0e\x32>.api.services.datastore.PropertyExpression.AggregationFunction\" \n\x13\x41ggregationFunction\x12\t\n\x05\x46IRST\x10\x01\"\xc7\x01\n\rPropertyOrder\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12M\n\tdirection\x18\x02 \x01(\x0e\x32/.api.services.datastore.PropertyOrder.Direction:\tASCENDING\"*\n\tDirection\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\x8c\x01\n\x06\x46ilter\x12\x41\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\'.api.services.datastore.CompositeFilter\x12?\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32&.api.services.datastore.PropertyFilter\"\x9a\x01\n\x0f\x43ompositeFilter\x12\x42\n\x08operator\x18\x01 \x02(\x0e\x32\x30.api.services.datastore.CompositeFilter.Operator\x12.\n\x06\x66ilter\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Filter\"\x13\n\x08Operator\x12\x07\n\x03\x41ND\x10\x01\"\xbb\x02\n\x0ePropertyFilter\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\x41\n\x08operator\x18\x02 \x02(\x0e\x32/.api.services.datastore.PropertyFilter.Operator\x12,\n\x05value\x18\x03 
\x02(\x0b\x32\x1d.api.services.datastore.Value\"{\n\x08Operator\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xae\x01\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x02(\t\x12\x1c\n\rallow_literal\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x35\n\x08name_arg\x18\x03 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\x12\x37\n\nnumber_arg\x18\x04 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\"Y\n\x0bGqlQueryArg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.api.services.datastore.Value\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"\xf1\x02\n\x10QueryResultBatch\x12K\n\x12\x65ntity_result_type\x18\x01 \x02(\x0e\x32/.api.services.datastore.EntityResult.ResultType\x12;\n\rentity_result\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12N\n\x0cmore_results\x18\x05 \x02(\x0e\x32\x38.api.services.datastore.QueryResultBatch.MoreResultsType\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\"V\n\x0fMoreResultsType\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\"\x8e\x02\n\x08Mutation\x12.\n\x06upsert\x18\x01 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06update\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06insert\x18\x03 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12\x36\n\x0einsert_auto_id\x18\x04 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12+\n\x06\x64\x65lete\x18\x05 \x03(\x0b\x32\x1b.api.services.datastore.Key\x12\r\n\x05\x66orce\x18\x06 \x01(\x08\"`\n\x0eMutationResult\x12\x15\n\rindex_updates\x18\x01 \x02(\x05\x12\x37\n\x12insert_auto_id_key\x18\x02 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xb4\x01\n\x0bReadOptions\x12V\n\x10read_consistency\x18\x01 
\x01(\x0e\x32\x33.api.services.datastore.ReadOptions.ReadConsistency:\x07\x44\x45\x46\x41ULT\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\"8\n\x0fReadConsistency\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\"t\n\rLookupRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12(\n\x03key\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xab\x01\n\x0eLookupResponse\x12\x33\n\x05\x66ound\x18\x01 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x35\n\x07missing\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12-\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xea\x01\n\x0fRunQueryRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12\x39\n\x0cpartition_id\x18\x02 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12,\n\x05query\x18\x03 \x01(\x0b\x32\x1d.api.services.datastore.Query\x12\x33\n\tgql_query\x18\x07 \x01(\x0b\x32 .api.services.datastore.GqlQuery\"K\n\x10RunQueryResponse\x12\x37\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32(.api.services.datastore.QueryResultBatch\"\xae\x01\n\x17\x42\x65ginTransactionRequest\x12\x61\n\x0fisolation_level\x18\x01 \x01(\x0e\x32>.api.services.datastore.BeginTransactionRequest.IsolationLevel:\x08SNAPSHOT\"0\n\x0eIsolationLevel\x12\x0c\n\x08SNAPSHOT\x10\x00\x12\x10\n\x0cSERIALIZABLE\x10\x01\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"&\n\x0fRollbackRequest\x12\x13\n\x0btransaction\x18\x01 \x02(\x0c\"\x12\n\x10RollbackResponse\"\xd3\x01\n\rCommitRequest\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\x12\x32\n\x08mutation\x18\x02 \x01(\x0b\x32 .api.services.datastore.Mutation\x12G\n\x04mode\x18\x05 \x01(\x0e\x32*.api.services.datastore.CommitRequest.Mode:\rTRANSACTIONAL\"0\n\x04Mode\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\"Q\n\x0e\x43ommitResponse\x12?\n\x0fmutation_result\x18\x01 
\x01(\x0b\x32&.api.services.datastore.MutationResult\">\n\x12\x41llocateIdsRequest\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key\"?\n\x13\x41llocateIdsResponse\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key2\xed\x04\n\x10\x44\x61tastoreService\x12Y\n\x06Lookup\x12%.api.services.datastore.LookupRequest\x1a&.api.services.datastore.LookupResponse\"\x00\x12_\n\x08RunQuery\x12\'.api.services.datastore.RunQueryRequest\x1a(.api.services.datastore.RunQueryResponse\"\x00\x12w\n\x10\x42\x65ginTransaction\x12/.api.services.datastore.BeginTransactionRequest\x1a\x30.api.services.datastore.BeginTransactionResponse\"\x00\x12Y\n\x06\x43ommit\x12%.api.services.datastore.CommitRequest\x1a&.api.services.datastore.CommitResponse\"\x00\x12_\n\x08Rollback\x12\'.api.services.datastore.RollbackRequest\x1a(.api.services.datastore.RollbackResponse\"\x00\x12h\n\x0b\x41llocateIds\x12*.api.services.datastore.AllocateIdsRequest\x1a+.api.services.datastore.AllocateIdsResponse\"\x00\x42#\n!com.google.api.services.datastore')
+ serialized_pb=_b('\n\x13_datastore_v1.proto\x12\x16\x61pi.services.datastore\"4\n\x0bPartitionId\x12\x12\n\ndataset_id\x18\x03 \x01(\t\x12\x11\n\tnamespace\x18\x04 \x01(\t\"\xb6\x01\n\x03Key\x12\x39\n\x0cpartition_id\x18\x01 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12=\n\x0cpath_element\x18\x02 \x03(\x0b\x32\'.api.services.datastore.Key.PathElement\x1a\x35\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x02(\t\x12\n\n\x02id\x18\x02 \x01(\x03\x12\x0c\n\x04name\x18\x03 \x01(\t\"\xf4\x02\n\x05Value\x12\x15\n\rboolean_value\x18\x01 \x01(\x08\x12\x15\n\rinteger_value\x18\x02 \x01(\x03\x12\x14\n\x0c\x64ouble_value\x18\x03 \x01(\x01\x12$\n\x1ctimestamp_microseconds_value\x18\x04 \x01(\x03\x12.\n\tkey_value\x18\x05 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x16\n\x0e\x62lob_key_value\x18\x10 \x01(\t\x12\x14\n\x0cstring_value\x18\x11 \x01(\t\x12\x12\n\nblob_value\x18\x12 \x01(\x0c\x12\x34\n\x0c\x65ntity_value\x18\x06 \x01(\x0b\x32\x1e.api.services.datastore.Entity\x12\x31\n\nlist_value\x18\x07 \x03(\x0b\x32\x1d.api.services.datastore.Value\x12\x0f\n\x07meaning\x18\x0e \x01(\x05\x12\x15\n\x07indexed\x18\x0f \x01(\x08:\x04true\"F\n\x08Property\x12\x0c\n\x04name\x18\x01 \x02(\t\x12,\n\x05value\x18\x04 \x02(\x0b\x32\x1d.api.services.datastore.Value\"f\n\x06\x45ntity\x12(\n\x03key\x18\x01 \x01(\x0b\x32\x1b.api.services.datastore.Key\x12\x32\n\x08property\x18\x02 \x03(\x0b\x32 .api.services.datastore.Property\"t\n\x0c\x45ntityResult\x12.\n\x06\x65ntity\x18\x01 \x02(\x0b\x32\x1e.api.services.datastore.Entity\"4\n\nResultType\x12\x08\n\x04\x46ULL\x10\x01\x12\x0e\n\nPROJECTION\x10\x02\x12\x0c\n\x08KEY_ONLY\x10\x03\"\xec\x02\n\x05Query\x12>\n\nprojection\x18\x02 \x03(\x0b\x32*.api.services.datastore.PropertyExpression\x12\x34\n\x04kind\x18\x03 \x03(\x0b\x32&.api.services.datastore.KindExpression\x12.\n\x06\x66ilter\x18\x04 \x01(\x0b\x32\x1e.api.services.datastore.Filter\x12\x34\n\x05order\x18\x05 
\x03(\x0b\x32%.api.services.datastore.PropertyOrder\x12;\n\x08group_by\x18\x06 \x03(\x0b\x32).api.services.datastore.PropertyReference\x12\x14\n\x0cstart_cursor\x18\x07 \x01(\x0c\x12\x12\n\nend_cursor\x18\x08 \x01(\x0c\x12\x11\n\x06offset\x18\n \x01(\x05:\x01\x30\x12\r\n\x05limit\x18\x0b \x01(\x05\"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x02(\t\"!\n\x11PropertyReference\x12\x0c\n\x04name\x18\x02 \x02(\t\"\xd1\x01\n\x12PropertyExpression\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\\\n\x14\x61ggregation_function\x18\x02 \x01(\x0e\x32>.api.services.datastore.PropertyExpression.AggregationFunction\" \n\x13\x41ggregationFunction\x12\t\n\x05\x46IRST\x10\x01\"\xc7\x01\n\rPropertyOrder\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12M\n\tdirection\x18\x02 \x01(\x0e\x32/.api.services.datastore.PropertyOrder.Direction:\tASCENDING\"*\n\tDirection\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02\"\x8c\x01\n\x06\x46ilter\x12\x41\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\'.api.services.datastore.CompositeFilter\x12?\n\x0fproperty_filter\x18\x02 \x01(\x0b\x32&.api.services.datastore.PropertyFilter\"\x9a\x01\n\x0f\x43ompositeFilter\x12\x42\n\x08operator\x18\x01 \x02(\x0e\x32\x30.api.services.datastore.CompositeFilter.Operator\x12.\n\x06\x66ilter\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Filter\"\x13\n\x08Operator\x12\x07\n\x03\x41ND\x10\x01\"\xbb\x02\n\x0ePropertyFilter\x12;\n\x08property\x18\x01 \x02(\x0b\x32).api.services.datastore.PropertyReference\x12\x41\n\x08operator\x18\x02 \x02(\x0e\x32/.api.services.datastore.PropertyFilter.Operator\x12,\n\x05value\x18\x03 
\x02(\x0b\x32\x1d.api.services.datastore.Value\"{\n\x08Operator\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x10\n\x0cHAS_ANCESTOR\x10\x0b\"\xae\x01\n\x08GqlQuery\x12\x14\n\x0cquery_string\x18\x01 \x02(\t\x12\x1c\n\rallow_literal\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x35\n\x08name_arg\x18\x03 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\x12\x37\n\nnumber_arg\x18\x04 \x03(\x0b\x32#.api.services.datastore.GqlQueryArg\"Y\n\x0bGqlQueryArg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.api.services.datastore.Value\x12\x0e\n\x06\x63ursor\x18\x03 \x01(\x0c\"\xf1\x02\n\x10QueryResultBatch\x12K\n\x12\x65ntity_result_type\x18\x01 \x02(\x0e\x32/.api.services.datastore.EntityResult.ResultType\x12;\n\rentity_result\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x12\n\nend_cursor\x18\x04 \x01(\x0c\x12N\n\x0cmore_results\x18\x05 \x02(\x0e\x32\x38.api.services.datastore.QueryResultBatch.MoreResultsType\x12\x17\n\x0fskipped_results\x18\x06 \x01(\x05\"V\n\x0fMoreResultsType\x12\x10\n\x0cNOT_FINISHED\x10\x01\x12\x1c\n\x18MORE_RESULTS_AFTER_LIMIT\x10\x02\x12\x13\n\x0fNO_MORE_RESULTS\x10\x03\"\x8e\x02\n\x08Mutation\x12.\n\x06upsert\x18\x01 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06update\x18\x02 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12.\n\x06insert\x18\x03 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12\x36\n\x0einsert_auto_id\x18\x04 \x03(\x0b\x32\x1e.api.services.datastore.Entity\x12+\n\x06\x64\x65lete\x18\x05 \x03(\x0b\x32\x1b.api.services.datastore.Key\x12\r\n\x05\x66orce\x18\x06 \x01(\x08\"`\n\x0eMutationResult\x12\x15\n\rindex_updates\x18\x01 \x02(\x05\x12\x37\n\x12insert_auto_id_key\x18\x02 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xb4\x01\n\x0bReadOptions\x12V\n\x10read_consistency\x18\x01 
\x01(\x0e\x32\x33.api.services.datastore.ReadOptions.ReadConsistency:\x07\x44\x45\x46\x41ULT\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\"8\n\x0fReadConsistency\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\n\n\x06STRONG\x10\x01\x12\x0c\n\x08\x45VENTUAL\x10\x02\"t\n\rLookupRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12(\n\x03key\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xab\x01\n\x0eLookupResponse\x12\x33\n\x05\x66ound\x18\x01 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12\x35\n\x07missing\x18\x02 \x03(\x0b\x32$.api.services.datastore.EntityResult\x12-\n\x08\x64\x65\x66\x65rred\x18\x03 \x03(\x0b\x32\x1b.api.services.datastore.Key\"\xea\x01\n\x0fRunQueryRequest\x12\x39\n\x0cread_options\x18\x01 \x01(\x0b\x32#.api.services.datastore.ReadOptions\x12\x39\n\x0cpartition_id\x18\x02 \x01(\x0b\x32#.api.services.datastore.PartitionId\x12,\n\x05query\x18\x03 \x01(\x0b\x32\x1d.api.services.datastore.Query\x12\x33\n\tgql_query\x18\x07 \x01(\x0b\x32 .api.services.datastore.GqlQuery\"K\n\x10RunQueryResponse\x12\x37\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32(.api.services.datastore.QueryResultBatch\"\xae\x01\n\x17\x42\x65ginTransactionRequest\x12\x61\n\x0fisolation_level\x18\x01 \x01(\x0e\x32>.api.services.datastore.BeginTransactionRequest.IsolationLevel:\x08SNAPSHOT\"0\n\x0eIsolationLevel\x12\x0c\n\x08SNAPSHOT\x10\x00\x12\x10\n\x0cSERIALIZABLE\x10\x01\"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\"&\n\x0fRollbackRequest\x12\x13\n\x0btransaction\x18\x01 \x02(\x0c\"\x12\n\x10RollbackResponse\"\xd3\x01\n\rCommitRequest\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c\x12\x32\n\x08mutation\x18\x02 \x01(\x0b\x32 .api.services.datastore.Mutation\x12G\n\x04mode\x18\x05 \x01(\x0e\x32*.api.services.datastore.CommitRequest.Mode:\rTRANSACTIONAL\"0\n\x04Mode\x12\x11\n\rTRANSACTIONAL\x10\x01\x12\x15\n\x11NON_TRANSACTIONAL\x10\x02\"Q\n\x0e\x43ommitResponse\x12?\n\x0fmutation_result\x18\x01 
\x01(\x0b\x32&.api.services.datastore.MutationResult\">\n\x12\x41llocateIdsRequest\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key\"?\n\x13\x41llocateIdsResponse\x12(\n\x03key\x18\x01 \x03(\x0b\x32\x1b.api.services.datastore.Key2\xed\x04\n\x10\x44\x61tastoreService\x12Y\n\x06Lookup\x12%.api.services.datastore.LookupRequest\x1a&.api.services.datastore.LookupResponse\"\x00\x12_\n\x08RunQuery\x12\'.api.services.datastore.RunQueryRequest\x1a(.api.services.datastore.RunQueryResponse\"\x00\x12w\n\x10\x42\x65ginTransaction\x12/.api.services.datastore.BeginTransactionRequest\x1a\x30.api.services.datastore.BeginTransactionResponse\"\x00\x12Y\n\x06\x43ommit\x12%.api.services.datastore.CommitRequest\x1a&.api.services.datastore.CommitResponse\"\x00\x12_\n\x08Rollback\x12\'.api.services.datastore.RollbackRequest\x1a(.api.services.datastore.RollbackResponse\"\x00\x12h\n\x0b\x41llocateIds\x12*.api.services.datastore.AllocateIdsRequest\x1a+.api.services.datastore.AllocateIdsResponse\"\x00\x42#\n!com.google.api.services.datastore')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
@@ -52,9 +45,10 @@
],
containing_type=None,
options=None,
- serialized_start=900,
- serialized_end=952,
+ serialized_start=901,
+ serialized_end=953,
)
+_sym_db.RegisterEnumDescriptor(_ENTITYRESULT_RESULTTYPE)
_PROPERTYEXPRESSION_AGGREGATIONFUNCTION = _descriptor.EnumDescriptor(
name='AggregationFunction',
@@ -69,9 +63,10 @@
],
containing_type=None,
options=None,
- serialized_start=1566,
- serialized_end=1598,
+ serialized_start=1567,
+ serialized_end=1599,
)
+_sym_db.RegisterEnumDescriptor(_PROPERTYEXPRESSION_AGGREGATIONFUNCTION)
_PROPERTYORDER_DIRECTION = _descriptor.EnumDescriptor(
name='Direction',
@@ -90,9 +85,10 @@
],
containing_type=None,
options=None,
- serialized_start=1758,
- serialized_end=1800,
+ serialized_start=1759,
+ serialized_end=1801,
)
+_sym_db.RegisterEnumDescriptor(_PROPERTYORDER_DIRECTION)
_COMPOSITEFILTER_OPERATOR = _descriptor.EnumDescriptor(
name='Operator',
@@ -107,9 +103,10 @@
],
containing_type=None,
options=None,
- serialized_start=2081,
- serialized_end=2100,
+ serialized_start=2082,
+ serialized_end=2101,
)
+_sym_db.RegisterEnumDescriptor(_COMPOSITEFILTER_OPERATOR)
_PROPERTYFILTER_OPERATOR = _descriptor.EnumDescriptor(
name='Operator',
@@ -144,9 +141,10 @@
],
containing_type=None,
options=None,
- serialized_start=2295,
- serialized_end=2418,
+ serialized_start=2296,
+ serialized_end=2419,
)
+_sym_db.RegisterEnumDescriptor(_PROPERTYFILTER_OPERATOR)
_QUERYRESULTBATCH_MORERESULTSTYPE = _descriptor.EnumDescriptor(
name='MoreResultsType',
@@ -169,9 +167,10 @@
],
containing_type=None,
options=None,
- serialized_start=2972,
- serialized_end=3058,
+ serialized_start=2973,
+ serialized_end=3059,
)
+_sym_db.RegisterEnumDescriptor(_QUERYRESULTBATCH_MORERESULTSTYPE)
_READOPTIONS_READCONSISTENCY = _descriptor.EnumDescriptor(
name='ReadConsistency',
@@ -194,9 +193,10 @@
],
containing_type=None,
options=None,
- serialized_start=3556,
- serialized_end=3612,
+ serialized_start=3557,
+ serialized_end=3613,
)
+_sym_db.RegisterEnumDescriptor(_READOPTIONS_READCONSISTENCY)
_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL = _descriptor.EnumDescriptor(
name='IsolationLevel',
@@ -215,9 +215,10 @@
],
containing_type=None,
options=None,
- serialized_start=4347,
- serialized_end=4395,
+ serialized_start=4348,
+ serialized_end=4396,
)
+_sym_db.RegisterEnumDescriptor(_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL)
_COMMITREQUEST_MODE = _descriptor.EnumDescriptor(
name='Mode',
@@ -236,9 +237,10 @@
],
containing_type=None,
options=None,
- serialized_start=4670,
- serialized_end=4718,
+ serialized_start=4671,
+ serialized_end=4719,
)
+_sym_db.RegisterEnumDescriptor(_COMMITREQUEST_MODE)
_PARTITIONID = _descriptor.Descriptor(
@@ -251,14 +253,14 @@
_descriptor.FieldDescriptor(
name='dataset_id', full_name='api.services.datastore.PartitionId.dataset_id', index=0,
number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='namespace', full_name='api.services.datastore.PartitionId.namespace', index=1,
number=4, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -271,8 +273,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=46,
- serialized_end=98,
+ oneofs=[
+ ],
+ serialized_start=47,
+ serialized_end=99,
)
@@ -286,7 +290,7 @@
_descriptor.FieldDescriptor(
name='kind', full_name='api.services.datastore.Key.PathElement.kind', index=0,
number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -300,7 +304,7 @@
_descriptor.FieldDescriptor(
name='name', full_name='api.services.datastore.Key.PathElement.name', index=2,
number=3, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -313,8 +317,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=230,
- serialized_end=283,
+ oneofs=[
+ ],
+ serialized_start=231,
+ serialized_end=284,
)
_KEY = _descriptor.Descriptor(
@@ -347,8 +353,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=101,
- serialized_end=283,
+ oneofs=[
+ ],
+ serialized_start=102,
+ serialized_end=284,
)
@@ -397,21 +405,21 @@
_descriptor.FieldDescriptor(
name='blob_key_value', full_name='api.services.datastore.Value.blob_key_value', index=5,
number=16, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='string_value', full_name='api.services.datastore.Value.string_value', index=6,
number=17, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='blob_value', full_name='api.services.datastore.Value.blob_value', index=7,
number=18, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -452,8 +460,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=286,
- serialized_end=658,
+ oneofs=[
+ ],
+ serialized_start=287,
+ serialized_end=659,
)
@@ -467,7 +477,7 @@
_descriptor.FieldDescriptor(
name='name', full_name='api.services.datastore.Property.name', index=0,
number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -487,8 +497,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=660,
- serialized_end=730,
+ oneofs=[
+ ],
+ serialized_start=661,
+ serialized_end=731,
)
@@ -522,8 +534,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=732,
- serialized_end=834,
+ oneofs=[
+ ],
+ serialized_start=733,
+ serialized_end=835,
)
@@ -551,8 +565,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=836,
- serialized_end=952,
+ oneofs=[
+ ],
+ serialized_start=837,
+ serialized_end=953,
)
@@ -601,14 +617,14 @@
_descriptor.FieldDescriptor(
name='start_cursor', full_name='api.services.datastore.Query.start_cursor', index=5,
number=7, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='end_cursor', full_name='api.services.datastore.Query.end_cursor', index=6,
number=8, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -635,8 +651,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=955,
- serialized_end=1319,
+ oneofs=[
+ ],
+ serialized_start=956,
+ serialized_end=1320,
)
@@ -650,7 +668,7 @@
_descriptor.FieldDescriptor(
name='name', full_name='api.services.datastore.KindExpression.name', index=0,
number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -663,8 +681,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1321,
- serialized_end=1351,
+ oneofs=[
+ ],
+ serialized_start=1322,
+ serialized_end=1352,
)
@@ -678,7 +698,7 @@
_descriptor.FieldDescriptor(
name='name', full_name='api.services.datastore.PropertyReference.name', index=0,
number=2, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -691,8 +711,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1353,
- serialized_end=1386,
+ oneofs=[
+ ],
+ serialized_start=1354,
+ serialized_end=1387,
)
@@ -727,8 +749,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1389,
- serialized_end=1598,
+ oneofs=[
+ ],
+ serialized_start=1390,
+ serialized_end=1599,
)
@@ -763,8 +787,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1601,
- serialized_end=1800,
+ oneofs=[
+ ],
+ serialized_start=1602,
+ serialized_end=1801,
)
@@ -798,8 +824,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1803,
- serialized_end=1943,
+ oneofs=[
+ ],
+ serialized_start=1804,
+ serialized_end=1944,
)
@@ -834,8 +862,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=1946,
- serialized_end=2100,
+ oneofs=[
+ ],
+ serialized_start=1947,
+ serialized_end=2101,
)
@@ -877,8 +907,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=2103,
- serialized_end=2418,
+ oneofs=[
+ ],
+ serialized_start=2104,
+ serialized_end=2419,
)
@@ -892,7 +924,7 @@
_descriptor.FieldDescriptor(
name='query_string', full_name='api.services.datastore.GqlQuery.query_string', index=0,
number=1, type=9, cpp_type=9, label=2,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -926,8 +958,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=2421,
- serialized_end=2595,
+ oneofs=[
+ ],
+ serialized_start=2422,
+ serialized_end=2596,
)
@@ -941,7 +975,7 @@
_descriptor.FieldDescriptor(
name='name', full_name='api.services.datastore.GqlQueryArg.name', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=unicode("", "utf-8"),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -955,7 +989,7 @@
_descriptor.FieldDescriptor(
name='cursor', full_name='api.services.datastore.GqlQueryArg.cursor', index=2,
number=3, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -968,8 +1002,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=2597,
- serialized_end=2686,
+ oneofs=[
+ ],
+ serialized_start=2598,
+ serialized_end=2687,
)
@@ -997,7 +1033,7 @@
_descriptor.FieldDescriptor(
name='end_cursor', full_name='api.services.datastore.QueryResultBatch.end_cursor', index=2,
number=4, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -1025,8 +1061,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=2689,
- serialized_end=3058,
+ oneofs=[
+ ],
+ serialized_start=2690,
+ serialized_end=3059,
)
@@ -1088,8 +1126,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=3061,
- serialized_end=3331,
+ oneofs=[
+ ],
+ serialized_start=3062,
+ serialized_end=3332,
)
@@ -1123,8 +1163,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=3333,
- serialized_end=3429,
+ oneofs=[
+ ],
+ serialized_start=3334,
+ serialized_end=3430,
)
@@ -1145,7 +1187,7 @@
_descriptor.FieldDescriptor(
name='transaction', full_name='api.services.datastore.ReadOptions.transaction', index=1,
number=2, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -1159,8 +1201,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=3432,
- serialized_end=3612,
+ oneofs=[
+ ],
+ serialized_start=3433,
+ serialized_end=3613,
)
@@ -1194,8 +1238,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=3614,
- serialized_end=3730,
+ oneofs=[
+ ],
+ serialized_start=3615,
+ serialized_end=3731,
)
@@ -1236,8 +1282,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=3733,
- serialized_end=3904,
+ oneofs=[
+ ],
+ serialized_start=3734,
+ serialized_end=3905,
)
@@ -1285,8 +1333,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=3907,
- serialized_end=4141,
+ oneofs=[
+ ],
+ serialized_start=3908,
+ serialized_end=4142,
)
@@ -1313,8 +1363,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4143,
- serialized_end=4218,
+ oneofs=[
+ ],
+ serialized_start=4144,
+ serialized_end=4219,
)
@@ -1342,8 +1394,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4221,
- serialized_end=4395,
+ oneofs=[
+ ],
+ serialized_start=4222,
+ serialized_end=4396,
)
@@ -1357,7 +1411,7 @@
_descriptor.FieldDescriptor(
name='transaction', full_name='api.services.datastore.BeginTransactionResponse.transaction', index=0,
number=1, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -1370,8 +1424,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4397,
- serialized_end=4444,
+ oneofs=[
+ ],
+ serialized_start=4398,
+ serialized_end=4445,
)
@@ -1385,7 +1441,7 @@
_descriptor.FieldDescriptor(
name='transaction', full_name='api.services.datastore.RollbackRequest.transaction', index=0,
number=1, type=12, cpp_type=9, label=2,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -1398,8 +1454,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4446,
- serialized_end=4484,
+ oneofs=[
+ ],
+ serialized_start=4447,
+ serialized_end=4485,
)
@@ -1419,8 +1477,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4486,
- serialized_end=4504,
+ oneofs=[
+ ],
+ serialized_start=4487,
+ serialized_end=4505,
)
@@ -1434,7 +1494,7 @@
_descriptor.FieldDescriptor(
name='transaction', full_name='api.services.datastore.CommitRequest.transaction', index=0,
number=1, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value="",
+ has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
@@ -1462,8 +1522,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4507,
- serialized_end=4718,
+ oneofs=[
+ ],
+ serialized_start=4508,
+ serialized_end=4719,
)
@@ -1490,8 +1552,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4720,
- serialized_end=4801,
+ oneofs=[
+ ],
+ serialized_start=4721,
+ serialized_end=4802,
)
@@ -1518,8 +1582,10 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4803,
- serialized_end=4865,
+ oneofs=[
+ ],
+ serialized_start=4804,
+ serialized_end=4866,
)
@@ -1546,11 +1612,13 @@
options=None,
is_extendable=False,
extension_ranges=[],
- serialized_start=4867,
- serialized_end=4930,
+ oneofs=[
+ ],
+ serialized_start=4868,
+ serialized_end=4931,
)
-_KEY_PATHELEMENT.containing_type = _KEY;
+_KEY_PATHELEMENT.containing_type = _KEY
_KEY.fields_by_name['partition_id'].message_type = _PARTITIONID
_KEY.fields_by_name['path_element'].message_type = _KEY_PATHELEMENT
_VALUE.fields_by_name['key_value'].message_type = _KEY
@@ -1560,7 +1628,7 @@
_ENTITY.fields_by_name['key'].message_type = _KEY
_ENTITY.fields_by_name['property'].message_type = _PROPERTY
_ENTITYRESULT.fields_by_name['entity'].message_type = _ENTITY
-_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT;
+_ENTITYRESULT_RESULTTYPE.containing_type = _ENTITYRESULT
_QUERY.fields_by_name['projection'].message_type = _PROPERTYEXPRESSION
_QUERY.fields_by_name['kind'].message_type = _KINDEXPRESSION
_QUERY.fields_by_name['filter'].message_type = _FILTER
@@ -1568,26 +1636,26 @@
_QUERY.fields_by_name['group_by'].message_type = _PROPERTYREFERENCE
_PROPERTYEXPRESSION.fields_by_name['property'].message_type = _PROPERTYREFERENCE
_PROPERTYEXPRESSION.fields_by_name['aggregation_function'].enum_type = _PROPERTYEXPRESSION_AGGREGATIONFUNCTION
-_PROPERTYEXPRESSION_AGGREGATIONFUNCTION.containing_type = _PROPERTYEXPRESSION;
+_PROPERTYEXPRESSION_AGGREGATIONFUNCTION.containing_type = _PROPERTYEXPRESSION
_PROPERTYORDER.fields_by_name['property'].message_type = _PROPERTYREFERENCE
_PROPERTYORDER.fields_by_name['direction'].enum_type = _PROPERTYORDER_DIRECTION
-_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER;
+_PROPERTYORDER_DIRECTION.containing_type = _PROPERTYORDER
_FILTER.fields_by_name['composite_filter'].message_type = _COMPOSITEFILTER
_FILTER.fields_by_name['property_filter'].message_type = _PROPERTYFILTER
_COMPOSITEFILTER.fields_by_name['operator'].enum_type = _COMPOSITEFILTER_OPERATOR
_COMPOSITEFILTER.fields_by_name['filter'].message_type = _FILTER
-_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER;
+_COMPOSITEFILTER_OPERATOR.containing_type = _COMPOSITEFILTER
_PROPERTYFILTER.fields_by_name['property'].message_type = _PROPERTYREFERENCE
_PROPERTYFILTER.fields_by_name['operator'].enum_type = _PROPERTYFILTER_OPERATOR
_PROPERTYFILTER.fields_by_name['value'].message_type = _VALUE
-_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER;
+_PROPERTYFILTER_OPERATOR.containing_type = _PROPERTYFILTER
_GQLQUERY.fields_by_name['name_arg'].message_type = _GQLQUERYARG
_GQLQUERY.fields_by_name['number_arg'].message_type = _GQLQUERYARG
_GQLQUERYARG.fields_by_name['value'].message_type = _VALUE
_QUERYRESULTBATCH.fields_by_name['entity_result_type'].enum_type = _ENTITYRESULT_RESULTTYPE
_QUERYRESULTBATCH.fields_by_name['entity_result'].message_type = _ENTITYRESULT
_QUERYRESULTBATCH.fields_by_name['more_results'].enum_type = _QUERYRESULTBATCH_MORERESULTSTYPE
-_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH;
+_QUERYRESULTBATCH_MORERESULTSTYPE.containing_type = _QUERYRESULTBATCH
_MUTATION.fields_by_name['upsert'].message_type = _ENTITY
_MUTATION.fields_by_name['update'].message_type = _ENTITY
_MUTATION.fields_by_name['insert'].message_type = _ENTITY
@@ -1595,7 +1663,7 @@
_MUTATION.fields_by_name['delete'].message_type = _KEY
_MUTATIONRESULT.fields_by_name['insert_auto_id_key'].message_type = _KEY
_READOPTIONS.fields_by_name['read_consistency'].enum_type = _READOPTIONS_READCONSISTENCY
-_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS;
+_READOPTIONS_READCONSISTENCY.containing_type = _READOPTIONS
_LOOKUPREQUEST.fields_by_name['read_options'].message_type = _READOPTIONS
_LOOKUPREQUEST.fields_by_name['key'].message_type = _KEY
_LOOKUPRESPONSE.fields_by_name['found'].message_type = _ENTITYRESULT
@@ -1607,10 +1675,10 @@
_RUNQUERYREQUEST.fields_by_name['gql_query'].message_type = _GQLQUERY
_RUNQUERYRESPONSE.fields_by_name['batch'].message_type = _QUERYRESULTBATCH
_BEGINTRANSACTIONREQUEST.fields_by_name['isolation_level'].enum_type = _BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL
-_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL.containing_type = _BEGINTRANSACTIONREQUEST;
+_BEGINTRANSACTIONREQUEST_ISOLATIONLEVEL.containing_type = _BEGINTRANSACTIONREQUEST
_COMMITREQUEST.fields_by_name['mutation'].message_type = _MUTATION
_COMMITREQUEST.fields_by_name['mode'].enum_type = _COMMITREQUEST_MODE
-_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST;
+_COMMITREQUEST_MODE.containing_type = _COMMITREQUEST
_COMMITRESPONSE.fields_by_name['mutation_result'].message_type = _MUTATIONRESULT
_ALLOCATEIDSREQUEST.fields_by_name['key'].message_type = _KEY
_ALLOCATEIDSRESPONSE.fields_by_name['key'].message_type = _KEY
@@ -1647,205 +1715,239 @@
DESCRIPTOR.message_types_by_name['AllocateIdsRequest'] = _ALLOCATEIDSREQUEST
DESCRIPTOR.message_types_by_name['AllocateIdsResponse'] = _ALLOCATEIDSRESPONSE
-class PartitionId(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _PARTITIONID
-
+PartitionId = _reflection.GeneratedProtocolMessageType('PartitionId', (_message.Message,), dict(
+ DESCRIPTOR = _PARTITIONID,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.PartitionId)
+ ))
+_sym_db.RegisterMessage(PartitionId)
-class Key(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
-
- class PathElement(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _KEY_PATHELEMENT
+Key = _reflection.GeneratedProtocolMessageType('Key', (_message.Message,), dict(
+ PathElement = _reflection.GeneratedProtocolMessageType('PathElement', (_message.Message,), dict(
+ DESCRIPTOR = _KEY_PATHELEMENT,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.Key.PathElement)
- DESCRIPTOR = _KEY
-
+ ))
+ ,
+ DESCRIPTOR = _KEY,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.Key)
+ ))
+_sym_db.RegisterMessage(Key)
+_sym_db.RegisterMessage(Key.PathElement)
-class Value(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _VALUE
-
+Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), dict(
+ DESCRIPTOR = _VALUE,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.Value)
+ ))
+_sym_db.RegisterMessage(Value)
-class Property(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _PROPERTY
-
+Property = _reflection.GeneratedProtocolMessageType('Property', (_message.Message,), dict(
+ DESCRIPTOR = _PROPERTY,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.Property)
+ ))
+_sym_db.RegisterMessage(Property)
-class Entity(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ENTITY
-
+Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict(
+ DESCRIPTOR = _ENTITY,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.Entity)
+ ))
+_sym_db.RegisterMessage(Entity)
-class EntityResult(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ENTITYRESULT
-
+EntityResult = _reflection.GeneratedProtocolMessageType('EntityResult', (_message.Message,), dict(
+ DESCRIPTOR = _ENTITYRESULT,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.EntityResult)
+ ))
+_sym_db.RegisterMessage(EntityResult)
-class Query(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _QUERY
-
+Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict(
+ DESCRIPTOR = _QUERY,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.Query)
+ ))
+_sym_db.RegisterMessage(Query)
-class KindExpression(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _KINDEXPRESSION
-
+KindExpression = _reflection.GeneratedProtocolMessageType('KindExpression', (_message.Message,), dict(
+ DESCRIPTOR = _KINDEXPRESSION,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.KindExpression)
+ ))
+_sym_db.RegisterMessage(KindExpression)
-class PropertyReference(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _PROPERTYREFERENCE
-
+PropertyReference = _reflection.GeneratedProtocolMessageType('PropertyReference', (_message.Message,), dict(
+ DESCRIPTOR = _PROPERTYREFERENCE,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.PropertyReference)
+ ))
+_sym_db.RegisterMessage(PropertyReference)
-class PropertyExpression(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _PROPERTYEXPRESSION
-
+PropertyExpression = _reflection.GeneratedProtocolMessageType('PropertyExpression', (_message.Message,), dict(
+ DESCRIPTOR = _PROPERTYEXPRESSION,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.PropertyExpression)
+ ))
+_sym_db.RegisterMessage(PropertyExpression)
-class PropertyOrder(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _PROPERTYORDER
-
+PropertyOrder = _reflection.GeneratedProtocolMessageType('PropertyOrder', (_message.Message,), dict(
+ DESCRIPTOR = _PROPERTYORDER,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.PropertyOrder)
+ ))
+_sym_db.RegisterMessage(PropertyOrder)
-class Filter(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _FILTER
-
+Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), dict(
+ DESCRIPTOR = _FILTER,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.Filter)
+ ))
+_sym_db.RegisterMessage(Filter)
-class CompositeFilter(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _COMPOSITEFILTER
-
+CompositeFilter = _reflection.GeneratedProtocolMessageType('CompositeFilter', (_message.Message,), dict(
+ DESCRIPTOR = _COMPOSITEFILTER,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.CompositeFilter)
+ ))
+_sym_db.RegisterMessage(CompositeFilter)
-class PropertyFilter(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _PROPERTYFILTER
-
+PropertyFilter = _reflection.GeneratedProtocolMessageType('PropertyFilter', (_message.Message,), dict(
+ DESCRIPTOR = _PROPERTYFILTER,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.PropertyFilter)
+ ))
+_sym_db.RegisterMessage(PropertyFilter)
-class GqlQuery(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _GQLQUERY
-
+GqlQuery = _reflection.GeneratedProtocolMessageType('GqlQuery', (_message.Message,), dict(
+ DESCRIPTOR = _GQLQUERY,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.GqlQuery)
+ ))
+_sym_db.RegisterMessage(GqlQuery)
-class GqlQueryArg(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _GQLQUERYARG
-
+GqlQueryArg = _reflection.GeneratedProtocolMessageType('GqlQueryArg', (_message.Message,), dict(
+ DESCRIPTOR = _GQLQUERYARG,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.GqlQueryArg)
+ ))
+_sym_db.RegisterMessage(GqlQueryArg)
-class QueryResultBatch(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _QUERYRESULTBATCH
-
+QueryResultBatch = _reflection.GeneratedProtocolMessageType('QueryResultBatch', (_message.Message,), dict(
+ DESCRIPTOR = _QUERYRESULTBATCH,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.QueryResultBatch)
+ ))
+_sym_db.RegisterMessage(QueryResultBatch)
-class Mutation(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _MUTATION
-
+Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), dict(
+ DESCRIPTOR = _MUTATION,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.Mutation)
+ ))
+_sym_db.RegisterMessage(Mutation)
-class MutationResult(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _MUTATIONRESULT
-
+MutationResult = _reflection.GeneratedProtocolMessageType('MutationResult', (_message.Message,), dict(
+ DESCRIPTOR = _MUTATIONRESULT,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.MutationResult)
+ ))
+_sym_db.RegisterMessage(MutationResult)
-class ReadOptions(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _READOPTIONS
-
+ReadOptions = _reflection.GeneratedProtocolMessageType('ReadOptions', (_message.Message,), dict(
+ DESCRIPTOR = _READOPTIONS,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.ReadOptions)
+ ))
+_sym_db.RegisterMessage(ReadOptions)
-class LookupRequest(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _LOOKUPREQUEST
-
+LookupRequest = _reflection.GeneratedProtocolMessageType('LookupRequest', (_message.Message,), dict(
+ DESCRIPTOR = _LOOKUPREQUEST,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.LookupRequest)
+ ))
+_sym_db.RegisterMessage(LookupRequest)
-class LookupResponse(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _LOOKUPRESPONSE
-
+LookupResponse = _reflection.GeneratedProtocolMessageType('LookupResponse', (_message.Message,), dict(
+ DESCRIPTOR = _LOOKUPRESPONSE,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.LookupResponse)
+ ))
+_sym_db.RegisterMessage(LookupResponse)
-class RunQueryRequest(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _RUNQUERYREQUEST
-
+RunQueryRequest = _reflection.GeneratedProtocolMessageType('RunQueryRequest', (_message.Message,), dict(
+ DESCRIPTOR = _RUNQUERYREQUEST,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryRequest)
+ ))
+_sym_db.RegisterMessage(RunQueryRequest)
-class RunQueryResponse(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _RUNQUERYRESPONSE
-
+RunQueryResponse = _reflection.GeneratedProtocolMessageType('RunQueryResponse', (_message.Message,), dict(
+ DESCRIPTOR = _RUNQUERYRESPONSE,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.RunQueryResponse)
+ ))
+_sym_db.RegisterMessage(RunQueryResponse)
-class BeginTransactionRequest(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _BEGINTRANSACTIONREQUEST
-
+BeginTransactionRequest = _reflection.GeneratedProtocolMessageType('BeginTransactionRequest', (_message.Message,), dict(
+ DESCRIPTOR = _BEGINTRANSACTIONREQUEST,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionRequest)
+ ))
+_sym_db.RegisterMessage(BeginTransactionRequest)
-class BeginTransactionResponse(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _BEGINTRANSACTIONRESPONSE
-
+BeginTransactionResponse = _reflection.GeneratedProtocolMessageType('BeginTransactionResponse', (_message.Message,), dict(
+ DESCRIPTOR = _BEGINTRANSACTIONRESPONSE,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.BeginTransactionResponse)
+ ))
+_sym_db.RegisterMessage(BeginTransactionResponse)
-class RollbackRequest(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ROLLBACKREQUEST
-
+RollbackRequest = _reflection.GeneratedProtocolMessageType('RollbackRequest', (_message.Message,), dict(
+ DESCRIPTOR = _ROLLBACKREQUEST,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.RollbackRequest)
+ ))
+_sym_db.RegisterMessage(RollbackRequest)
-class RollbackResponse(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ROLLBACKRESPONSE
-
+RollbackResponse = _reflection.GeneratedProtocolMessageType('RollbackResponse', (_message.Message,), dict(
+ DESCRIPTOR = _ROLLBACKRESPONSE,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.RollbackResponse)
+ ))
+_sym_db.RegisterMessage(RollbackResponse)
-class CommitRequest(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _COMMITREQUEST
-
+CommitRequest = _reflection.GeneratedProtocolMessageType('CommitRequest', (_message.Message,), dict(
+ DESCRIPTOR = _COMMITREQUEST,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.CommitRequest)
+ ))
+_sym_db.RegisterMessage(CommitRequest)
-class CommitResponse(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _COMMITRESPONSE
-
+CommitResponse = _reflection.GeneratedProtocolMessageType('CommitResponse', (_message.Message,), dict(
+ DESCRIPTOR = _COMMITRESPONSE,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.CommitResponse)
+ ))
+_sym_db.RegisterMessage(CommitResponse)
-class AllocateIdsRequest(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ALLOCATEIDSREQUEST
-
+AllocateIdsRequest = _reflection.GeneratedProtocolMessageType('AllocateIdsRequest', (_message.Message,), dict(
+ DESCRIPTOR = _ALLOCATEIDSREQUEST,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsRequest)
+ ))
+_sym_db.RegisterMessage(AllocateIdsRequest)
-class AllocateIdsResponse(_message.Message):
- __metaclass__ = _reflection.GeneratedProtocolMessageType
- DESCRIPTOR = _ALLOCATEIDSRESPONSE
-
+AllocateIdsResponse = _reflection.GeneratedProtocolMessageType('AllocateIdsResponse', (_message.Message,), dict(
+ DESCRIPTOR = _ALLOCATEIDSRESPONSE,
+ __module__ = '_datastore_v1_pb2'
# @@protoc_insertion_point(class_scope:api.services.datastore.AllocateIdsResponse)
+ ))
+_sym_db.RegisterMessage(AllocateIdsResponse)
DESCRIPTOR.has_options = True
-DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n!com.google.api.services.datastore')
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n!com.google.api.services.datastore'))
# @@protoc_insertion_point(module_scope)
diff --git a/gcloud/datastore/_implicit_environ.py b/gcloud/datastore/_implicit_environ.py
index 26e6b69382e5..504cd4c8942d 100644
--- a/gcloud/datastore/_implicit_environ.py
+++ b/gcloud/datastore/_implicit_environ.py
@@ -18,9 +18,10 @@
imply the current dataset ID and connection from the enviroment.
"""
-import httplib
import socket
+from six.moves.http_client import HTTPConnection # pylint: disable=F0401
+
try:
from google.appengine.api import app_identity
except ImportError:
@@ -67,7 +68,7 @@ def compute_engine_id():
host = '169.254.169.254'
uri_path = '/computeMetadata/v1/project/project-id'
headers = {'Metadata-Flavor': 'Google'}
- connection = httplib.HTTPConnection(host, timeout=0.1)
+ connection = HTTPConnection(host, timeout=0.1)
try:
connection.request('GET', uri_path, headers=headers)
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index 4632de0c1461..f0130c802e04 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -15,7 +15,6 @@
"""Create / interact with gcloud datastore keys."""
import copy
-from itertools import izip
import six
from gcloud.datastore import _implicit_environ
@@ -102,7 +101,7 @@ def _parse_path(path_args):
id_or_name_list += (partial_ending,)
result = []
- for kind, id_or_name in izip(kind_list, id_or_name_list):
+ for kind, id_or_name in zip(kind_list, id_or_name_list):
curr_key_part = {}
if isinstance(kind, six.string_types):
curr_key_part['kind'] = kind
diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py
index 852b5f82f5d4..0ebc28a8927c 100644
--- a/gcloud/datastore/test___init__.py
+++ b/gcloud/datastore/test___init__.py
@@ -30,59 +30,86 @@ def _callFUT(self, dataset_id=None):
from gcloud.datastore import set_default_dataset_id
return set_default_dataset_id(dataset_id=dataset_id)
- def _monkey(self, implicit_dataset_id, httplib=None, app_identity=None):
- from contextlib import nested
+ def _monkeyEnviron(self, implicit_dataset_id):
import os
-
from gcloud._testing import _Monkey
from gcloud.datastore import _DATASET_ENV_VAR_NAME
+ environ = {_DATASET_ENV_VAR_NAME: implicit_dataset_id}
+ return _Monkey(os, getenv=environ.get)
+
+ def _monkeyImplicit(self, connection=None, app_identity=None):
+ from gcloud._testing import _Monkey
from gcloud.datastore import _implicit_environ
- environ = {_DATASET_ENV_VAR_NAME: implicit_dataset_id}
- httplib = httplib or _Httplib(None, status=404)
- return nested(_Monkey(os, getenv=environ.get),
- _Monkey(_implicit_environ, httplib=httplib,
- app_identity=app_identity))
+ if connection is None:
+ connection = _HTTPConnection(404, None)
+
+ def _factory(host, timeout):
+ connection.host = host
+ connection.timeout = timeout
+ return connection
+
+ return _Monkey(_implicit_environ,
+ HTTPConnection=_factory,
+ app_identity=app_identity)
def test_no_env_var_set(self):
from gcloud.datastore import _implicit_environ
- with self._monkey(None):
- self._callFUT()
+
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit():
+ self._callFUT()
+
self.assertEqual(_implicit_environ.DATASET_ID, None)
def test_set_from_env_var(self):
from gcloud.datastore import _implicit_environ
IMPLICIT_DATASET_ID = 'IMPLICIT'
- with self._monkey(IMPLICIT_DATASET_ID):
- self._callFUT()
+
+ with self._monkeyEnviron(IMPLICIT_DATASET_ID):
+ with self._monkeyImplicit():
+ self._callFUT()
+
self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID)
def test_set_explicit_w_env_var_set(self):
from gcloud.datastore import _implicit_environ
EXPLICIT_DATASET_ID = 'EXPLICIT'
- with self._monkey(None):
- self._callFUT(EXPLICIT_DATASET_ID)
+
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit():
+ self._callFUT(EXPLICIT_DATASET_ID)
+
self.assertEqual(_implicit_environ.DATASET_ID, EXPLICIT_DATASET_ID)
def test_set_explicit_no_env_var_set(self):
from gcloud.datastore import _implicit_environ
IMPLICIT_DATASET_ID = 'IMPLICIT'
EXPLICIT_DATASET_ID = 'EXPLICIT'
- with self._monkey(IMPLICIT_DATASET_ID):
- self._callFUT(EXPLICIT_DATASET_ID)
+
+ with self._monkeyEnviron(IMPLICIT_DATASET_ID):
+ with self._monkeyImplicit():
+ self._callFUT(EXPLICIT_DATASET_ID)
+
self.assertEqual(_implicit_environ.DATASET_ID, EXPLICIT_DATASET_ID)
def test_set_explicit_None_wo_env_var_set(self):
from gcloud.datastore import _implicit_environ
- with self._monkey(None):
- self._callFUT(None)
+
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit():
+ self._callFUT(None)
+
self.assertEqual(_implicit_environ.DATASET_ID, None)
def test_set_explicit_None_w_env_var_set(self):
from gcloud.datastore import _implicit_environ
IMPLICIT_DATASET_ID = 'IMPLICIT'
- with self._monkey(IMPLICIT_DATASET_ID):
- self._callFUT(None)
+
+ with self._monkeyEnviron(IMPLICIT_DATASET_ID):
+ with self._monkeyImplicit():
+ self._callFUT(None)
+
self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID)
def test_set_implicit_from_appengine(self):
@@ -91,8 +118,9 @@ def test_set_implicit_from_appengine(self):
APP_ENGINE_ID = 'GAE'
APP_IDENTITY = _AppIdentity(APP_ENGINE_ID)
- with self._monkey(None, app_identity=APP_IDENTITY):
- self._callFUT()
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit(app_identity=APP_IDENTITY):
+ self._callFUT()
self.assertEqual(_implicit_environ.DATASET_ID, APP_ENGINE_ID)
@@ -102,8 +130,9 @@ def test_set_implicit_both_env_and_appengine(self):
IMPLICIT_DATASET_ID = 'IMPLICIT'
APP_IDENTITY = _AppIdentity('GAE')
- with self._monkey(IMPLICIT_DATASET_ID, app_identity=APP_IDENTITY):
- self._callFUT()
+ with self._monkeyEnviron(IMPLICIT_DATASET_ID):
+ with self._monkeyImplicit(app_identity=APP_IDENTITY):
+ self._callFUT()
self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID)
@@ -111,31 +140,33 @@ def _implicit_compute_engine_helper(self, status):
from gcloud.datastore import _implicit_environ
COMPUTE_ENGINE_ID = 'GCE'
- HTTPLIB = _Httplib(COMPUTE_ENGINE_ID, status=status)
if status == 200:
EXPECTED_ID = COMPUTE_ENGINE_ID
else:
EXPECTED_ID = None
- with self._monkey(None, httplib=HTTPLIB):
- self._callFUT()
+ if status == 'RAISE':
+ connection = _TimeoutHTTPConnection()
+ else:
+ connection = _HTTPConnection(status, EXPECTED_ID)
- self.assertEqual(_implicit_environ.DATASET_ID, EXPECTED_ID)
- self.assertEqual(len(HTTPLIB._http_connections), 1)
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit(connection=connection):
+ self._callFUT()
- (host, timeout, http_connection), = HTTPLIB._http_connections
- self.assertEqual(host, '169.254.169.254')
- self.assertEqual(timeout, 0.1)
+ self.assertEqual(_implicit_environ.DATASET_ID, EXPECTED_ID)
+ self.assertEqual(connection.host, '169.254.169.254')
+ self.assertEqual(connection.timeout, 0.1)
self.assertEqual(
- http_connection._called_args,
+ connection._called_args,
[('GET', '/computeMetadata/v1/project/project-id')])
expected_kwargs = {
'headers': {
'Metadata-Flavor': 'Google',
},
}
- self.assertEqual(http_connection._called_kwargs, [expected_kwargs])
- self.assertEqual(http_connection._close_count, 1)
+ self.assertEqual(connection._called_kwargs, [expected_kwargs])
+ self.assertEqual(connection._close_count, 1)
def test_set_implicit_from_compute_engine(self):
self._implicit_compute_engine_helper(200)
@@ -151,27 +182,32 @@ def test_set_implicit_both_appengine_and_compute(self):
APP_ENGINE_ID = 'GAE'
APP_IDENTITY = _AppIdentity(APP_ENGINE_ID)
- HTTPLIB = _Httplib('GCE')
+ connection = _HTTPConnection(200, 'GCE')
- with self._monkey(None, httplib=HTTPLIB, app_identity=APP_IDENTITY):
- self._callFUT()
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit(connection=connection,
+ app_identity=APP_IDENTITY):
+ self._callFUT()
self.assertEqual(_implicit_environ.DATASET_ID, APP_ENGINE_ID)
- self.assertEqual(len(HTTPLIB._http_connections), 0)
+ self.assertEqual(connection.host, None)
+ self.assertEqual(connection.timeout, None)
def test_set_implicit_three_env_appengine_and_compute(self):
from gcloud.datastore import _implicit_environ
IMPLICIT_DATASET_ID = 'IMPLICIT'
APP_IDENTITY = _AppIdentity('GAE')
- HTTPLIB = _Httplib('GCE')
+ connection = _HTTPConnection(200, 'GCE')
- with self._monkey(IMPLICIT_DATASET_ID, httplib=HTTPLIB,
- app_identity=APP_IDENTITY):
- self._callFUT()
+ with self._monkeyEnviron(IMPLICIT_DATASET_ID):
+ with self._monkeyImplicit(connection=connection,
+ app_identity=APP_IDENTITY):
+ self._callFUT()
self.assertEqual(_implicit_environ.DATASET_ID, IMPLICIT_DATASET_ID)
- self.assertEqual(len(HTTPLIB._http_connections), 0)
+ self.assertEqual(connection.host, None)
+ self.assertEqual(connection.timeout, None)
class Test_set_default_connection(unittest2.TestCase):
@@ -281,10 +317,11 @@ def read(self):
return self.data
-class _HTTPConnection(object):
+class _BaseHTTPConnection(object):
+
+ host = timeout = None
- def __init__(self, parent):
- self.parent = parent
+ def __init__(self):
self._close_count = 0
self._called_args = []
self._called_kwargs = []
@@ -293,26 +330,23 @@ def request(self, method, uri, **kwargs):
self._called_args.append((method, uri))
self._called_kwargs.append(kwargs)
- def getresponse(self):
- import socket
-
- if self.parent.status == 'RAISE':
- raise socket.timeout('timed out')
- else:
- return _HTTPResponse(self.parent.status, self.parent.project_id)
-
def close(self):
self._close_count += 1
-class _Httplib(object):
+class _HTTPConnection(_BaseHTTPConnection):
- def __init__(self, project_id, status=200):
- self.project_id = project_id
+ def __init__(self, status, project_id):
+ super(_HTTPConnection, self).__init__()
self.status = status
- self._http_connections = []
+ self.project_id = project_id
+
+ def getresponse(self):
+ return _HTTPResponse(self.status, self.project_id)
+
+
+class _TimeoutHTTPConnection(_BaseHTTPConnection):
- def HTTPConnection(self, host, timeout=None):
- result = _HTTPConnection(self)
- self._http_connections.append((host, timeout, result))
- return result
+ def getresponse(self):
+ import socket
+ raise socket.timeout('timed out')
diff --git a/gcloud/datastore/test_api.py b/gcloud/datastore/test_api.py
index 605d6acf6a9f..587fb9448a0c 100644
--- a/gcloud/datastore/test_api.py
+++ b/gcloud/datastore/test_api.py
@@ -864,7 +864,7 @@ def test_w_explicit_connection(self):
result = self._callFUT(INCOMPLETE_KEY, NUM_IDS, connection=CONNECTION)
# Check the IDs returned match.
- self.assertEqual([key.id for key in result], range(NUM_IDS))
+ self.assertEqual([key.id for key in result], list(range(NUM_IDS)))
# Check connection is called correctly.
self.assertEqual(CONNECTION._called_dataset_id, DATASET_ID)
@@ -884,7 +884,7 @@ def test_w_implicit_connection(self):
result = self._callFUT(INCOMPLETE_KEY, NUM_IDS)
# Check the IDs returned.
- self.assertEqual([key.id for key in result], range(NUM_IDS))
+ self.assertEqual([key.id for key in result], list(range(NUM_IDS)))
def test_with_already_completed_key(self):
from gcloud.datastore import _implicit_environ
diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py
index 49425a5fb04a..053e2e0c75b3 100644
--- a/gcloud/datastore/test_connection.py
+++ b/gcloud/datastore/test_connection.py
@@ -238,11 +238,11 @@ def test_lookup_single_key_empty_response_w_eventual(self):
_compare_key_pb_after_request(self, key_pb, keys[0])
self.assertEqual(request.read_options.read_consistency,
datastore_pb.ReadOptions.EVENTUAL)
- self.assertEqual(request.read_options.transaction, '')
+ self.assertEqual(request.read_options.transaction, b'')
def test_lookup_single_key_empty_response_w_eventual_and_transaction(self):
DATASET_ID = 'DATASET'
- TRANSACTION = 'TRANSACTION'
+ TRANSACTION = b'TRANSACTION'
key_pb = self._make_key_pb(DATASET_ID)
conn = self._makeOne()
self.assertRaises(ValueError, conn.lookup, DATASET_ID, key_pb,
@@ -252,7 +252,7 @@ def test_lookup_single_key_empty_response_w_transaction(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
DATASET_ID = 'DATASET'
- TRANSACTION = 'TRANSACTION'
+ TRANSACTION = b'TRANSACTION'
key_pb = self._make_key_pb(DATASET_ID)
rsp_pb = datastore_pb.LookupResponse()
conn = self._makeOne()
@@ -455,7 +455,7 @@ def test_run_query_w_eventual_no_transaction(self):
self.assertEqual(request.query, q_pb)
self.assertEqual(request.read_options.read_consistency,
datastore_pb.ReadOptions.EVENTUAL)
- self.assertEqual(request.read_options.transaction, '')
+ self.assertEqual(request.read_options.transaction, b'')
def test_run_query_wo_eventual_w_transaction(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
@@ -463,7 +463,7 @@ def test_run_query_wo_eventual_w_transaction(self):
DATASET_ID = 'DATASET'
KIND = 'Nonesuch'
CURSOR = b'\x00'
- TRANSACTION = 'TRANSACTION'
+ TRANSACTION = b'TRANSACTION'
q_pb = self._make_query_pb(KIND)
rsp_pb = datastore_pb.RunQueryResponse()
rsp_pb.batch.end_cursor = CURSOR
@@ -503,7 +503,7 @@ def test_run_query_w_eventual_and_transaction(self):
DATASET_ID = 'DATASET'
KIND = 'Nonesuch'
CURSOR = b'\x00'
- TRANSACTION = 'TRANSACTION'
+ TRANSACTION = b'TRANSACTION'
q_pb = self._make_query_pb(KIND)
rsp_pb = datastore_pb.RunQueryResponse()
rsp_pb.batch.end_cursor = CURSOR
@@ -584,7 +584,7 @@ def test_begin_transaction_default_serialize(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
DATASET_ID = 'DATASET'
- TRANSACTION = 'TRANSACTION'
+ TRANSACTION = b'TRANSACTION'
rsp_pb = datastore_pb.BeginTransactionResponse()
rsp_pb.transaction = TRANSACTION
conn = self._makeOne()
@@ -609,7 +609,7 @@ def test_begin_transaction_explicit_serialize(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
DATASET_ID = 'DATASET'
- TRANSACTION = 'TRANSACTION'
+ TRANSACTION = b'TRANSACTION'
rsp_pb = datastore_pb.BeginTransactionResponse()
rsp_pb.transaction = TRANSACTION
conn = self._makeOne()
@@ -660,7 +660,7 @@ def test_commit_wo_transaction(self):
rq_class = datastore_pb.CommitRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(request.transaction, '')
+ self.assertEqual(request.transaction, b'')
self.assertEqual(request.mutation, mutation)
self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
@@ -686,7 +686,7 @@ def test_commit_w_transaction(self):
'commit',
])
http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
- result = conn.commit(DATASET_ID, mutation, 'xact')
+ result = conn.commit(DATASET_ID, mutation, b'xact')
self.assertEqual(result.index_updates, 0)
self.assertEqual(list(result.insert_auto_id_key), [])
cw = http._called_with
@@ -694,14 +694,14 @@ def test_commit_w_transaction(self):
rq_class = datastore_pb.CommitRequest
request = rq_class()
request.ParseFromString(cw['body'])
- self.assertEqual(request.transaction, 'xact')
+ self.assertEqual(request.transaction, b'xact')
self.assertEqual(request.mutation, mutation)
self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
def test_rollback_ok(self):
from gcloud.datastore import _datastore_v1_pb2 as datastore_pb
DATASET_ID = 'DATASET'
- TRANSACTION = 'xact'
+ TRANSACTION = b'xact'
rsp_pb = datastore_pb.RollbackResponse()
conn = self._makeOne()
@@ -821,7 +821,7 @@ def allocate_ids(self, dataset_id, key_pbs):
self._called_dataset_id = dataset_id
self._called_key_pbs = key_pbs
num_pbs = len(key_pbs)
- return [_KeyProto(i) for i in range(num_pbs)]
+ return [_KeyProto(i) for i in list(range(num_pbs))]
class _PathElementProto(object):
diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py
index b087444ab97d..bf2a5d676e69 100644
--- a/gcloud/datastore/test_helpers.py
+++ b/gcloud/datastore/test_helpers.py
@@ -175,7 +175,7 @@ def test_datetime_naive(self):
utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc)
name, value = self._callFUT(naive)
self.assertEqual(name, 'timestamp_microseconds_value')
- self.assertEqual(value / 1000000, calendar.timegm(utc.timetuple()))
+ self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
self.assertEqual(value % 1000000, 4375)
def test_datetime_w_zone(self):
@@ -186,7 +186,7 @@ def test_datetime_w_zone(self):
utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc)
name, value = self._callFUT(utc)
self.assertEqual(name, 'timestamp_microseconds_value')
- self.assertEqual(value / 1000000, calendar.timegm(utc.timetuple()))
+ self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
self.assertEqual(value % 1000000, 4375)
def test_key(self):
@@ -227,8 +227,12 @@ def test_long_too_large(self):
self.assertRaises(ValueError, self._callFUT, too_large)
def test_native_str(self):
+ import six
name, value = self._callFUT('str')
- self.assertEqual(name, 'blob_value')
+ if six.PY2:
+ self.assertEqual(name, 'blob_value')
+ else: # pragma: NO COVER
+ self.assertEqual(name, 'string_value')
self.assertEqual(value, 'str')
def test_bytes(self):
@@ -382,7 +386,7 @@ def test_datetime(self):
utc = datetime.datetime(2014, 9, 16, 10, 19, 32, 4375, pytz.utc)
self._callFUT(pb, utc)
value = pb.timestamp_microseconds_value
- self.assertEqual(value / 1000000, calendar.timegm(utc.timetuple()))
+ self.assertEqual(value // 1000000, calendar.timegm(utc.timetuple()))
self.assertEqual(value % 1000000, 4375)
def test_key(self):
@@ -439,9 +443,13 @@ def test_long(self):
self.assertEqual(value, must_be_long)
def test_native_str(self):
+ import six
pb = self._makePB()
self._callFUT(pb, 'str')
- value = pb.blob_value
+ if six.PY2:
+ value = pb.blob_value
+ else: # pragma: NO COVER
+ value = pb.string_value
self.assertEqual(value, 'str')
def test_bytes(self):
@@ -463,7 +471,7 @@ def test_entity_empty_wo_key(self):
entity = Entity()
self._callFUT(pb, entity)
value = pb.entity_value
- self.assertEqual(value.key.SerializeToString(), '')
+ self.assertEqual(value.key.SerializeToString(), b'')
props = list(value.property)
self.assertEqual(len(props), 0)
diff --git a/gcloud/storage/blob.py b/gcloud/storage/blob.py
index 31927bed640d..4528b6f2a665 100644
--- a/gcloud/storage/blob.py
+++ b/gcloud/storage/blob.py
@@ -19,8 +19,10 @@
import os
import time
import datetime
-from StringIO import StringIO
-import urllib
+from io import BytesIO
+
+import six
+from six.moves.urllib.parse import quote # pylint: disable=F0401
from _gcloud_vendor.apitools.base.py import http_wrapper
from _gcloud_vendor.apitools.base.py import transfer
@@ -123,7 +125,7 @@ def path(self):
elif not self.name:
raise ValueError('Cannot determine path without a blob name.')
- return self.bucket.path + '/o/' + urllib.quote(self.name, safe='')
+ return self.bucket.path + '/o/' + quote(self.name, safe='')
@property
def public_url(self):
@@ -135,7 +137,7 @@ def public_url(self):
return '{storage_base_url}/{bucket_name}/{quoted_name}'.format(
storage_base_url='http://commondatastorage.googleapis.com',
bucket_name=self.bucket.name,
- quoted_name=urllib.quote(self.name, safe=''))
+ quoted_name=quote(self.name, safe=''))
def generate_signed_url(self, expiration, method='GET'):
"""Generates a signed URL for this blob.
@@ -160,7 +162,7 @@ def generate_signed_url(self, expiration, method='GET'):
"""
resource = '/{bucket_name}/{quoted_name}'.format(
bucket_name=self.bucket.name,
- quoted_name=urllib.quote(self.name, safe=''))
+ quoted_name=quote(self.name, safe=''))
return generate_signed_url(
self.connection.credentials, resource=resource,
@@ -253,11 +255,11 @@ def download_to_filename(self, filename):
def download_as_string(self):
"""Download the contents of this blob as a string.
- :rtype: string
+ :rtype: bytes
:returns: The data stored in this blob.
:raises: :class:`gcloud.exceptions.NotFound`
"""
- string_buffer = StringIO()
+ string_buffer = BytesIO()
self.download_to_file(string_buffer)
return string_buffer.getvalue()
@@ -369,16 +371,19 @@ def upload_from_string(self, data, content_type='text/plain'):
`lifecycle `_
API documents for details.
- :type data: string
- :param data: The data to store in this blob.
+ :type data: bytes or text
+ :param data: The data to store in this blob. If the value is
+ text, it will be encoded as UTF-8.
:rtype: :class:`Blob`
:returns: The updated Blob object.
"""
- string_buffer = StringIO()
+ if isinstance(data, six.text_type):
+ data = data.encode('utf-8')
+ string_buffer = BytesIO()
string_buffer.write(data)
self.upload_from_file(file_obj=string_buffer, rewind=True,
- size=string_buffer.len,
+ size=len(data),
content_type=content_type)
return self
diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py
index dd51ccc9c9c2..666f07e6acd7 100644
--- a/gcloud/storage/connection.py
+++ b/gcloud/storage/connection.py
@@ -15,7 +15,8 @@
"""Create / interact with gcloud storage connections."""
import json
-import urllib
+
+from six.moves.urllib.parse import urlencode # pylint: disable=F0401
from gcloud.connection import Connection as _Base
from gcloud.exceptions import make_exception
@@ -126,7 +127,7 @@ def build_api_url(self, path, query_params=None, api_base_url=None,
query_params = query_params or {}
query_params.update({'project': self.project})
- url += '?' + urllib.urlencode(query_params)
+ url += '?' + urlencode(query_params)
return url
diff --git a/gcloud/storage/test___init__.py b/gcloud/storage/test___init__.py
index c68d5b76a462..f6ad2510fab4 100644
--- a/gcloud/storage/test___init__.py
+++ b/gcloud/storage/test___init__.py
@@ -86,30 +86,35 @@ def _callFUT(self, bucket=None):
from gcloud.storage import set_default_bucket
return set_default_bucket(bucket=bucket)
- def _monkey(self, implicit_bucket_name, connection=None):
- from contextlib import nested
+ def _monkeyEnviron(self, implicit_bucket_name):
import os
from gcloud._testing import _Monkey
from gcloud.storage import _BUCKET_ENV_VAR_NAME
- from gcloud.storage import _implicit_environ
environ = {_BUCKET_ENV_VAR_NAME: implicit_bucket_name}
- return nested(_Monkey(os, getenv=environ.get),
- _Monkey(_implicit_environ, CONNECTION=connection))
+ return _Monkey(os, getenv=environ.get)
+
+ def _monkeyImplicit(self, connection):
+ from gcloud._testing import _Monkey
+ from gcloud.storage import _implicit_environ
+
+ return _Monkey(_implicit_environ, CONNECTION=connection)
def test_no_env_var_set(self):
from gcloud.storage import _implicit_environ
- with self._monkey(None):
- self._callFUT()
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit(None):
+ self._callFUT()
self.assertEqual(_implicit_environ.BUCKET, None)
def test_set_from_env_var(self):
from gcloud.storage import _implicit_environ
IMPLICIT_BUCKET_NAME = 'IMPLICIT'
CONNECTION = object()
- with self._monkey(IMPLICIT_BUCKET_NAME, connection=CONNECTION):
- self._callFUT()
+ with self._monkeyEnviron(IMPLICIT_BUCKET_NAME):
+ with self._monkeyImplicit(CONNECTION):
+ self._callFUT()
self.assertEqual(_implicit_environ.BUCKET.name, IMPLICIT_BUCKET_NAME)
self.assertEqual(_implicit_environ.BUCKET.connection, CONNECTION)
@@ -117,8 +122,9 @@ def test_set_from_env_var(self):
def test_set_explicit_w_env_var_set(self):
from gcloud.storage import _implicit_environ
EXPLICIT_BUCKET = object()
- with self._monkey(None):
- self._callFUT(EXPLICIT_BUCKET)
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit(None):
+ self._callFUT(EXPLICIT_BUCKET)
self.assertEqual(_implicit_environ.BUCKET, EXPLICIT_BUCKET)
def test_set_explicit_no_env_var_set(self):
@@ -126,30 +132,34 @@ def test_set_explicit_no_env_var_set(self):
IMPLICIT_BUCKET_NAME = 'IMPLICIT'
CONNECTION = object()
EXPLICIT_BUCKET = object()
- with self._monkey(IMPLICIT_BUCKET_NAME, connection=CONNECTION):
- self._callFUT(EXPLICIT_BUCKET)
+ with self._monkeyEnviron(IMPLICIT_BUCKET_NAME):
+ with self._monkeyImplicit(CONNECTION):
+ self._callFUT(EXPLICIT_BUCKET)
self.assertEqual(_implicit_environ.BUCKET, EXPLICIT_BUCKET)
def test_set_explicit_None_wo_env_var_set(self):
from gcloud.storage import _implicit_environ
CONNECTION = object()
- with self._monkey(None, connection=CONNECTION):
- self._callFUT(None)
+ with self._monkeyEnviron(None):
+ with self._monkeyImplicit(CONNECTION):
+ self._callFUT(None)
self.assertEqual(_implicit_environ.BUCKET, None)
def test_set_explicit_None_wo_connection_set(self):
from gcloud.storage import _implicit_environ
IMPLICIT_BUCKET_NAME = 'IMPLICIT'
- with self._monkey(IMPLICIT_BUCKET_NAME, connection=None):
- self._callFUT(None)
+ with self._monkeyEnviron(IMPLICIT_BUCKET_NAME):
+ with self._monkeyImplicit(None):
+ self._callFUT(None)
self.assertEqual(_implicit_environ.BUCKET, None)
def test_set_explicit_None_w_env_var_set(self):
from gcloud.storage import _implicit_environ
IMPLICIT_BUCKET_NAME = 'IMPLICIT'
CONNECTION = object()
- with self._monkey(IMPLICIT_BUCKET_NAME, connection=CONNECTION):
- self._callFUT(None)
+ with self._monkeyEnviron(IMPLICIT_BUCKET_NAME):
+ with self._monkeyImplicit(CONNECTION):
+ self._callFUT(None)
self.assertEqual(_implicit_environ.BUCKET.name, IMPLICIT_BUCKET_NAME)
self.assertEqual(_implicit_environ.BUCKET.connection, CONNECTION)
diff --git a/gcloud/storage/test_blob.py b/gcloud/storage/test_blob.py
index 69e676893356..53ff306e1b39 100644
--- a/gcloud/storage/test_blob.py
+++ b/gcloud/storage/test_blob.py
@@ -229,40 +229,42 @@ def test_delete(self):
self.assertFalse(blob.exists())
def test_download_to_file(self):
- import httplib
- from StringIO import StringIO
+ from six.moves.http_client import OK
+ from six.moves.http_client import PARTIAL_CONTENT
+ from io import BytesIO
BLOB_NAME = 'blob-name'
- chunk1_response = {'status': httplib.PARTIAL_CONTENT,
+ chunk1_response = {'status': PARTIAL_CONTENT,
'content-range': 'bytes 0-2/6'}
- chunk2_response = {'status': httplib.OK,
+ chunk2_response = {'status': OK,
'content-range': 'bytes 3-5/6'}
connection = _Connection(
- (chunk1_response, 'abc'),
- (chunk2_response, 'def'),
+ (chunk1_response, b'abc'),
+ (chunk2_response, b'def'),
)
bucket = _Bucket(connection)
MEDIA_LINK = 'http://example.com/media/'
properties = {'mediaLink': MEDIA_LINK}
blob = self._makeOne(bucket, BLOB_NAME, properties)
blob.CHUNK_SIZE = 3
- fh = StringIO()
+ fh = BytesIO()
blob.download_to_file(fh)
- self.assertEqual(fh.getvalue(), 'abcdef')
+ self.assertEqual(fh.getvalue(), b'abcdef')
def test_download_to_filename(self):
- import httplib
import os
import time
import datetime
+ from six.moves.http_client import OK
+ from six.moves.http_client import PARTIAL_CONTENT
from tempfile import NamedTemporaryFile
BLOB_NAME = 'blob-name'
- chunk1_response = {'status': httplib.PARTIAL_CONTENT,
+ chunk1_response = {'status': PARTIAL_CONTENT,
'content-range': 'bytes 0-2/6'}
- chunk2_response = {'status': httplib.OK,
+ chunk2_response = {'status': OK,
'content-range': 'bytes 3-5/6'}
connection = _Connection(
- (chunk1_response, 'abc'),
- (chunk2_response, 'def'),
+ (chunk1_response, b'abc'),
+ (chunk2_response, b'def'),
)
bucket = _Bucket(connection)
MEDIA_LINK = 'http://example.com/media/'
@@ -273,7 +275,7 @@ def test_download_to_filename(self):
with NamedTemporaryFile() as f:
blob.download_to_filename(f.name)
f.flush()
- with open(f.name) as g:
+ with open(f.name, 'rb') as g:
wrote = g.read()
mtime = os.path.getmtime(f.name)
updatedTime = time.mktime(
@@ -281,19 +283,20 @@ def test_download_to_filename(self):
blob.properties['updated'],
'%Y-%m-%dT%H:%M:%S.%fz').timetuple()
)
- self.assertEqual(wrote, 'abcdef')
+ self.assertEqual(wrote, b'abcdef')
self.assertEqual(mtime, updatedTime)
def test_download_as_string(self):
- import httplib
+ from six.moves.http_client import OK
+ from six.moves.http_client import PARTIAL_CONTENT
BLOB_NAME = 'blob-name'
- chunk1_response = {'status': httplib.PARTIAL_CONTENT,
+ chunk1_response = {'status': PARTIAL_CONTENT,
'content-range': 'bytes 0-2/6'}
- chunk2_response = {'status': httplib.OK,
+ chunk2_response = {'status': OK,
'content-range': 'bytes 3-5/6'}
connection = _Connection(
- (chunk1_response, 'abc'),
- (chunk2_response, 'def'),
+ (chunk1_response, b'abc'),
+ (chunk2_response, b'def'),
)
bucket = _Bucket(connection)
MEDIA_LINK = 'http://example.com/media/'
@@ -301,18 +304,18 @@ def test_download_as_string(self):
blob = self._makeOne(bucket, BLOB_NAME, properties)
blob.CHUNK_SIZE = 3
fetched = blob.download_as_string()
- self.assertEqual(fetched, 'abcdef')
+ self.assertEqual(fetched, b'abcdef')
def test_upload_from_file_simple(self):
- import httplib
+ from six.moves.http_client import OK
+ from six.moves.urllib.parse import parse_qsl
+ from six.moves.urllib.parse import urlsplit
from tempfile import NamedTemporaryFile
- from urlparse import parse_qsl
- from urlparse import urlsplit
BLOB_NAME = 'blob-name'
- DATA = 'ABCDEF'
- response = {'status': httplib.OK}
+ DATA = b'ABCDEF'
+ response = {'status': OK}
connection = _Connection(
- (response, ''),
+ (response, b''),
)
bucket = _Bucket(connection)
blob = self._makeOne(bucket, BLOB_NAME)
@@ -337,24 +340,24 @@ def test_upload_from_file_simple(self):
self.assertEqual(headers['Content-Type'], 'application/unknown')
def test_upload_from_file_resumable(self):
- import httplib
+ from six.moves.http_client import OK
+ from six.moves.urllib.parse import parse_qsl
+ from six.moves.urllib.parse import urlsplit
from tempfile import NamedTemporaryFile
- from urlparse import parse_qsl
- from urlparse import urlsplit
from gcloud._testing import _Monkey
from _gcloud_vendor.apitools.base.py import http_wrapper
from _gcloud_vendor.apitools.base.py import transfer
BLOB_NAME = 'blob-name'
UPLOAD_URL = 'http://example.com/upload/name/key'
- DATA = 'ABCDEF'
- loc_response = {'status': httplib.OK, 'location': UPLOAD_URL}
+ DATA = b'ABCDEF'
+ loc_response = {'status': OK, 'location': UPLOAD_URL}
chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
'range': 'bytes 0-4'}
- chunk2_response = {'status': httplib.OK}
+ chunk2_response = {'status': OK}
connection = _Connection(
- (loc_response, ''),
- (chunk1_response, ''),
- (chunk2_response, ''),
+ (loc_response, b''),
+ (chunk1_response, b''),
+ (chunk2_response, b''),
)
bucket = _Bucket(connection)
blob = self._makeOne(bucket, BLOB_NAME)
@@ -396,18 +399,18 @@ def test_upload_from_file_resumable(self):
self.assertEqual(headers['Content-Range'], 'bytes 5-5/6')
def test_upload_from_file_w_slash_in_name(self):
- import httplib
+ from six.moves.http_client import OK
+ from six.moves.urllib.parse import parse_qsl
+ from six.moves.urllib.parse import urlsplit
from tempfile import NamedTemporaryFile
- from urlparse import parse_qsl
- from urlparse import urlsplit
from _gcloud_vendor.apitools.base.py import http_wrapper
BLOB_NAME = 'parent/child'
UPLOAD_URL = 'http://example.com/upload/name/parent%2Fchild'
- DATA = 'ABCDEF'
- loc_response = {'status': httplib.OK, 'location': UPLOAD_URL}
+ DATA = b'ABCDEF'
+ loc_response = {'status': OK, 'location': UPLOAD_URL}
chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
'range': 'bytes 0-4'}
- chunk2_response = {'status': httplib.OK}
+ chunk2_response = {'status': OK}
connection = _Connection(
(loc_response, ''),
(chunk1_response, ''),
@@ -436,18 +439,18 @@ def test_upload_from_file_w_slash_in_name(self):
self.assertEqual(headers['Content-Type'], 'application/unknown')
def test_upload_from_filename(self):
- import httplib
+ from six.moves.http_client import OK
+ from six.moves.urllib.parse import parse_qsl
+ from six.moves.urllib.parse import urlsplit
from tempfile import NamedTemporaryFile
- from urlparse import parse_qsl
- from urlparse import urlsplit
from _gcloud_vendor.apitools.base.py import http_wrapper
BLOB_NAME = 'blob-name'
UPLOAD_URL = 'http://example.com/upload/name/key'
- DATA = 'ABCDEF'
- loc_response = {'status': httplib.OK, 'location': UPLOAD_URL}
+ DATA = b'ABCDEF'
+ loc_response = {'status': OK, 'location': UPLOAD_URL}
chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
'range': 'bytes 0-4'}
- chunk2_response = {'status': httplib.OK}
+ chunk2_response = {'status': OK}
connection = _Connection(
(loc_response, ''),
(chunk1_response, ''),
@@ -475,18 +478,18 @@ def test_upload_from_filename(self):
self.assertEqual(headers['Content-Length'], '6')
self.assertEqual(headers['Content-Type'], 'image/jpeg')
- def test_upload_from_string(self):
- import httplib
- from urlparse import parse_qsl
- from urlparse import urlsplit
+ def test_upload_from_string_w_bytes(self):
+ from six.moves.http_client import OK
+ from six.moves.urllib.parse import parse_qsl
+ from six.moves.urllib.parse import urlsplit
from _gcloud_vendor.apitools.base.py import http_wrapper
BLOB_NAME = 'blob-name'
UPLOAD_URL = 'http://example.com/upload/name/key'
- DATA = 'ABCDEF'
- loc_response = {'status': httplib.OK, 'location': UPLOAD_URL}
+ DATA = b'ABCDEF'
+ loc_response = {'status': OK, 'location': UPLOAD_URL}
chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
'range': 'bytes 0-4'}
- chunk2_response = {'status': httplib.OK}
+ chunk2_response = {'status': OK}
connection = _Connection(
(loc_response, ''),
(chunk1_response, ''),
@@ -510,6 +513,45 @@ def test_upload_from_string(self):
[(x.title(), str(y)) for x, y in rq[0]['headers'].items()])
self.assertEqual(headers['Content-Length'], '6')
self.assertEqual(headers['Content-Type'], 'text/plain')
+ self.assertEqual(rq[0]['body'], DATA)
+
+ def test_upload_from_string_w_text(self):
+ from six.moves.http_client import OK
+ from six.moves.urllib.parse import parse_qsl
+ from six.moves.urllib.parse import urlsplit
+ from _gcloud_vendor.apitools.base.py import http_wrapper
+ BLOB_NAME = 'blob-name'
+ UPLOAD_URL = 'http://example.com/upload/name/key'
+ DATA = u'ABCDEF\u1234'
+ ENCODED = DATA.encode('utf-8')
+ loc_response = {'status': OK, 'location': UPLOAD_URL}
+ chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
+ 'range': 'bytes 0-4'}
+ chunk2_response = {'status': OK}
+ connection = _Connection(
+ (loc_response, ''),
+ (chunk1_response, ''),
+ (chunk2_response, ''),
+ )
+ bucket = _Bucket(connection)
+ blob = self._makeOne(bucket, BLOB_NAME)
+ blob.CHUNK_SIZE = 5
+ blob.upload_from_string(DATA)
+ rq = connection.http._requested
+ self.assertEqual(len(rq), 1)
+ self.assertEqual(rq[0]['method'], 'POST')
+ uri = rq[0]['uri']
+ scheme, netloc, path, qs, _ = urlsplit(uri)
+ self.assertEqual(scheme, 'http')
+ self.assertEqual(netloc, 'example.com')
+ self.assertEqual(path, '/b/name/o')
+ self.assertEqual(dict(parse_qsl(qs)),
+ {'uploadType': 'media', 'name': BLOB_NAME})
+ headers = dict(
+ [(x.title(), str(y)) for x, y in rq[0]['headers'].items()])
+ self.assertEqual(headers['Content-Length'], str(len(ENCODED)))
+ self.assertEqual(headers['Content-Type'], 'text/plain')
+ self.assertEqual(rq[0]['body'], ENCODED)
def test_make_public(self):
from gcloud.storage.acl import _ACLEntity
@@ -875,9 +917,9 @@ def api_request(self, **kw):
def build_api_url(self, path, query_params=None,
api_base_url=API_BASE_URL, upload=False):
- from urllib import urlencode
- from urlparse import urlsplit
- from urlparse import urlunsplit
+ from six.moves.urllib.parse import urlencode
+ from six.moves.urllib.parse import urlsplit
+ from six.moves.urllib.parse import urlunsplit
# mimic the build_api_url interface, but avoid unused param and
# missed coverage errors
upload = not upload # pragma NO COVER
diff --git a/gcloud/storage/test_connection.py b/gcloud/storage/test_connection.py
index 74c5d9dfc005..4666a9abfa84 100644
--- a/gcloud/storage/test_connection.py
+++ b/gcloud/storage/test_connection.py
@@ -151,8 +151,8 @@ def test_build_api_url_no_extra_query_params(self):
self.assertEqual(conn.build_api_url('/foo'), URI)
def test_build_api_url_w_extra_query_params(self):
- from urlparse import parse_qsl
- from urlparse import urlsplit
+ from six.moves.urllib.parse import parse_qsl
+ from six.moves.urllib.parse import urlsplit
PROJECT = 'project'
conn = self._makeOne(PROJECT)
uri = conn.build_api_url('/foo', {'bar': 'baz'})
@@ -283,8 +283,8 @@ def test_api_request_wo_json_expected(self):
'CONTENT')
def test_api_request_w_query_params(self):
- from urlparse import parse_qsl
- from urlparse import urlsplit
+ from six.moves.urllib.parse import parse_qsl
+ from six.moves.urllib.parse import urlsplit
PROJECT = 'project'
conn = self._makeOne(PROJECT)
http = conn._http = Http(
diff --git a/gcloud/test_credentials.py b/gcloud/test_credentials.py
index 0f0176bded11..7f352a70fac1 100644
--- a/gcloud/test_credentials.py
+++ b/gcloud/test_credentials.py
@@ -22,7 +22,7 @@ def test_get_for_service_account_p12_wo_scope(self):
from gcloud import credentials
from gcloud._testing import _Monkey
CLIENT_EMAIL = 'phred@example.com'
- PRIVATE_KEY = 'SEEkR1t'
+ PRIVATE_KEY = b'SEEkR1t'
client = _Client()
with _Monkey(credentials, client=client):
with NamedTemporaryFile() as file_obj:
@@ -43,7 +43,7 @@ def test_get_for_service_account_p12_w_scope(self):
from gcloud import credentials
from gcloud._testing import _Monkey
CLIENT_EMAIL = 'phred@example.com'
- PRIVATE_KEY = 'SEEkR1t'
+ PRIVATE_KEY = b'SEEkR1t'
SCOPE = 'SCOPE'
client = _Client()
with _Monkey(credentials, client=client):
@@ -69,13 +69,14 @@ def _callFUT(self, *args, **kwargs):
def test_w_expiration_int(self):
import base64
- import urlparse
+ from six.moves.urllib.parse import parse_qs
+ from six.moves.urllib.parse import urlsplit
from gcloud._testing import _Monkey
from gcloud import credentials as MUT
ENDPOINT = 'http://api.example.com'
RESOURCE = '/name/path'
- SIGNED = base64.b64encode('DEADBEEF')
+ SIGNED = base64.b64encode(b'DEADBEEF')
CREDENTIALS = _Credentials()
def _get_signed_query_params(*args):
@@ -90,13 +91,14 @@ def _get_signed_query_params(*args):
url = self._callFUT(CREDENTIALS, RESOURCE, 1000,
api_access_endpoint=ENDPOINT)
- scheme, netloc, path, qs, frag = urlparse.urlsplit(url)
+ scheme, netloc, path, qs, frag = urlsplit(url)
self.assertEqual(scheme, 'http')
self.assertEqual(netloc, 'api.example.com')
self.assertEqual(path, RESOURCE)
- params = urlparse.parse_qs(qs)
+ params = parse_qs(qs)
self.assertEqual(len(params), 3)
- self.assertEqual(params['Signature'], [SIGNED])
+ # In Py3k, parse_qs gives us text values:
+ self.assertEqual(params['Signature'], [SIGNED.decode('ascii')])
self.assertEqual(params['Expires'], ['1000'])
self.assertEqual(params['GoogleAccessId'],
[_Credentials.service_account_name])
@@ -140,17 +142,17 @@ def _run_test_with_credentials(self, credentials, account_name):
sha256 = _SHA256()
EXPIRATION = '100'
- SIGNATURE_STRING = 'dummy_signature'
+ SIGNATURE_STRING = b'dummy_signature'
with _Monkey(MUT, crypt=crypt, RSA=rsa, PKCS1_v1_5=pkcs_v1_5,
SHA256=sha256):
result = self._callFUT(credentials, EXPIRATION, SIGNATURE_STRING)
if crypt._pkcs12_key_as_pem_called:
self.assertEqual(crypt._private_key_text,
- base64.b64encode('dummy_private_key_text'))
+ base64.b64encode(b'dummy_private_key_text'))
self.assertEqual(crypt._private_key_password, 'notasecret')
self.assertEqual(sha256._signature_string, SIGNATURE_STRING)
- SIGNED = base64.b64encode('DEADBEEF')
+ SIGNED = base64.b64encode(b'DEADBEEF')
expected_query = {
'Expires': EXPIRATION,
'GoogleAccessId': account_name,
@@ -164,7 +166,7 @@ def test_signed_jwt_for_p12(self):
scopes = []
ACCOUNT_NAME = 'dummy_service_account_name'
credentials = client.SignedJwtAssertionCredentials(
- ACCOUNT_NAME, 'dummy_private_key_text', scopes)
+ ACCOUNT_NAME, b'dummy_private_key_text', scopes)
self._run_test_with_credentials(credentials, ACCOUNT_NAME)
def test_service_account_via_json_key(self):
@@ -203,7 +205,7 @@ def test_signed_jwt_for_p12(self):
from gcloud import credentials as MUT
scopes = []
- PRIVATE_KEY = 'dummy_private_key_text'
+ PRIVATE_KEY = b'dummy_private_key_text'
credentials = client.SignedJwtAssertionCredentials(
'dummy_service_account_name', PRIVATE_KEY, scopes)
crypt = _Crypt()
@@ -376,7 +378,7 @@ def new(self, pem_key):
def sign(self, signature_hash):
self._signature_hash = signature_hash
- return 'DEADBEEF'
+ return b'DEADBEEF'
class _SHA256(object):
diff --git a/setup.py b/setup.py
index ad871d9228da..1fde6828a644 100644
--- a/setup.py
+++ b/setup.py
@@ -43,6 +43,8 @@
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
'Topic :: Internet',
]
)
diff --git a/tox.ini b/tox.ini
index fd0a998a4e49..574c1218025e 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
envlist =
- py26,py27,cover,docs,lint
+ py26,py27,py34,cover,docs,lint
[testenv:py26]
commands =
@@ -8,7 +8,7 @@ commands =
deps =
nose
unittest2
- protobuf==2.6.0
+ protobuf==3.0.0-alpha-1
[testenv]
install_command =
@@ -18,6 +18,7 @@ commands =
deps =
nose
unittest2
+ protobuf==3.0.0-alpha-1
[testenv:cover]
basepython =
@@ -27,6 +28,7 @@ commands =
deps =
nose
unittest2
+ protobuf==3.0.0-alpha-1
coverage
nosexcover
@@ -62,6 +64,7 @@ deps =
pep8
pylint
unittest2
+ protobuf==3.0.0-alpha-1
[testenv:regression]
basepython =
@@ -70,3 +73,13 @@ commands =
{toxinidir}/scripts/run_regression.sh
deps =
unittest2
+ protobuf==3.0.0-alpha-1
+
+[testenv:regression3]
+basepython =
+ python3.4
+commands =
+ {toxinidir}/scripts/run_regression.sh
+deps =
+ unittest2
+ protobuf==3.0.0-alpha-1