diff --git a/docs/datastore-api.rst b/docs/datastore-api.rst
index 5d99a025caf3..6cfbc3cd27c4 100644
--- a/docs/datastore-api.rst
+++ b/docs/datastore-api.rst
@@ -56,3 +56,9 @@ Transactions
   :members:
   :undoc-members:
   :show-inheritance:
+
+Helper functions
+----------------
+
+.. automodule:: gcloud.datastore.helpers
+  :members:
diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index 410cf8e534a2..e2815d98b587 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -2,7 +2,7 @@

 from gcloud import connection
 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
-from gcloud.datastore import _helpers
+from gcloud.datastore import helpers
 from gcloud.datastore.dataset import Dataset
@@ -376,7 +376,7 @@ def save_entity(self, dataset_id, key_pb, properties):
             prop.name = name

             # Set the appropriate value.
-            _helpers._set_protobuf_value(prop.value, value)
+            helpers._set_protobuf_value(prop.value, value)

         # If this is in a transaction, we should just return True. The
         # transaction will handle assigning any keys as necessary.
diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py
index 8ec22035c929..cb5484abbf38 100644
--- a/gcloud/datastore/dataset.py
+++ b/gcloud/datastore/dataset.py
@@ -1,5 +1,10 @@
 """Create / interact with gcloud datastore datasets."""

+from gcloud.datastore import helpers
+from gcloud.datastore.entity import Entity
+from gcloud.datastore.query import Query
+from gcloud.datastore.transaction import Transaction
+

 class Dataset(object):
     """A dataset in the Cloud Datastore.
@@ -70,8 +75,6 @@ def query(self, *args, **kwargs):
         :rtype: :class:`gcloud.datastore.query.Query`
         :returns: a new Query instance, bound to this dataset.
         """
-        # This import is here to avoid circular references.
-        from gcloud.datastore.query import Query
         kwargs['dataset'] = self
         return Query(*args, **kwargs)
@@ -84,8 +87,6 @@ def entity(self, kind):
         :rtype: :class:`gcloud.datastore.entity.Entity`
         :returns: a new Entity instance, bound to this dataset.
         """
-        # This import is here to avoid circular references.
-        from gcloud.datastore.entity import Entity
         return Entity(dataset=self, kind=kind)

     def transaction(self, *args, **kwargs):
@@ -98,8 +99,6 @@ def transaction(self, *args, **kwargs):
         :rtype: :class:`gcloud.datastore.transaction.Transaction`
         :returns: a new Transaction instance, bound to this dataset.
         """
-        # This import is here to avoid circular references.
-        from gcloud.datastore.transaction import Transaction
         kwargs['dataset'] = self
         return Transaction(*args, **kwargs)
@@ -125,9 +124,6 @@ def get_entities(self, keys):
         :rtype: list of :class:`gcloud.datastore.entity.Entity`
         :return: The requested entities.
         """
-        # This import is here to avoid circular references.
-        from gcloud.datastore.entity import Entity
-
         entity_pbs = self.connection().lookup(
             dataset_id=self.id(),
             key_pbs=[k.to_protobuf() for k in keys]
@@ -135,5 +131,6 @@

         entities = []
         for entity_pb in entity_pbs:
-            entities.append(Entity.from_protobuf(entity_pb, dataset=self))
+            entities.append(helpers.entity_from_protobuf(
+                entity_pb, dataset=self))
         return entities
diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py
index 88ecf1324ff2..a81adaf7fc6f 100644
--- a/gcloud/datastore/entity.py
+++ b/gcloud/datastore/entity.py
@@ -146,32 +146,6 @@ def from_key(cls, key, dataset=None):

         return cls(dataset).key(key)

-    @classmethod
-    def from_protobuf(cls, pb, dataset=None):
-        """Factory method for creating an entity based on a protobuf.
-
-        The protobuf should be one returned from the Cloud Datastore
-        Protobuf API.
-
-        :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Entity`
-        :param pb: The Protobuf representing the entity.
-
-        :returns: The :class:`Entity` derived from the
-                  :class:`gcloud.datastore.datastore_v1_pb2.Entity`.
-        """
-
-        # This is here to avoid circular imports.
-        from gcloud.datastore import _helpers
-
-        key = Key.from_protobuf(pb.key)
-        entity = cls.from_key(key, dataset)
-
-        for property_pb in pb.property:
-            value = _helpers._get_value_from_property_pb(property_pb)
-            entity[property_pb.name] = value
-
-        return entity
-
     @property
     def _must_key(self):
         """Return our key, or raise NoKey if not set.
@@ -248,9 +222,11 @@ def save(self):
             transaction.add_auto_id_entity(self)

         if isinstance(key_pb, datastore_pb.Key):
-            updated_key = Key.from_protobuf(key_pb)
+            path = [
+                {'kind': element.kind, 'id': element.id, 'name': element.name}
+                for element in key_pb.path_element]
             # Update the path (which may have been altered).
-            self._key = key.path(updated_key.path())
+            self._key = key.path(path)

         return self
diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/helpers.py
similarity index 78%
rename from gcloud/datastore/_helpers.py
rename to gcloud/datastore/helpers.py
index 1ebba5377457..c8927bf2beea 100644
--- a/gcloud/datastore/_helpers.py
+++ b/gcloud/datastore/helpers.py
@@ -1,7 +1,9 @@
 """Helper functions for dealing with Cloud Datastore's Protobuf API.

-These functions are *not* part of the API.
+The non-private functions are part of the API.
 """
+__all__ = ('entity_from_protobuf', 'key_from_protobuf')
+

 import calendar
 import datetime
@@ -14,6 +16,60 @@
 INT_VALUE_CHECKER = Int64ValueChecker()


+def entity_from_protobuf(pb, dataset=None):
+    """Factory method for creating an entity based on a protobuf.
+
+    The protobuf should be one returned from the Cloud Datastore
+    Protobuf API.
+
+    :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Entity`
+    :param pb: The Protobuf representing the entity.
+
+    :rtype: :class:`gcloud.datastore.entity.Entity`
+    :returns: The entity derived from the protobuf.
+    """
+    key = key_from_protobuf(pb.key)
+    entity = Entity.from_key(key, dataset)
+
+    for property_pb in pb.property:
+        value = _get_value_from_property_pb(property_pb)
+        entity[property_pb.name] = value
+
+    return entity
+
+
+def key_from_protobuf(pb):
+    """Factory method for creating a key based on a protobuf.
+
+    The protobuf should be one returned from the Cloud Datastore
+    Protobuf API.
+
+    :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param pb: The Protobuf representing the key.
+
+    :rtype: :class:`gcloud.datastore.key.Key`
+    :returns: a new `Key` instance
+    """
+    path = []
+    for element in pb.path_element:
+        element_dict = {'kind': element.kind}
+
+        if element.HasField('id'):
+            element_dict['id'] = element.id
+
+        # This is safe: we expect proto objects returned will only have
+        # one of `name` or `id` set.
+        if element.HasField('name'):
+            element_dict['name'] = element.name
+
+        path.append(element_dict)
+
+    dataset_id = pb.partition_id.dataset_id or None
+    namespace = pb.partition_id.namespace
+
+    return Key(path, namespace, dataset_id)
+
+
 def _get_protobuf_attribute_and_value(val):
     """Given a value, return the protobuf attribute name and proper value.
@@ -105,7 +161,7 @@ def _get_value_from_value_pb(value_pb):
         result = naive.replace(tzinfo=pytz.utc)

     elif value_pb.HasField('key_value'):
-        result = Key.from_protobuf(value_pb.key_value)
+        result = key_from_protobuf(value_pb.key_value)

     elif value_pb.HasField('boolean_value'):
         result = value_pb.boolean_value
@@ -123,7 +179,7 @@ def _get_value_from_value_pb(value_pb):
         result = value_pb.blob_value

     elif value_pb.HasField('entity_value'):
-        result = Entity.from_protobuf(value_pb.entity_value)
+        result = entity_from_protobuf(value_pb.entity_value)

     elif value_pb.list_value:
         result = [_get_value_from_value_pb(x) for x in value_pb.list_value]
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index be28c4db42e7..983e7cddb531 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -42,38 +42,6 @@ def _clone(self):
         """
         return copy.deepcopy(self)

-    @classmethod
-    def from_protobuf(cls, pb):
-        """Factory method for creating a key based on a protobuf.
-
-        The protobuf should be one returned from the Cloud Datastore
-        Protobuf API.
-
-        :type pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
-        :param pb: The Protobuf representing the key.
-
-        :rtype: :class:`gcloud.datastore.key.Key`
-        :returns: a new `Key` instance
-        """
-        path = []
-        for element in pb.path_element:
-            element_dict = {'kind': element.kind}
-
-            if element.HasField('id'):
-                element_dict['id'] = element.id
-
-            # This is safe: we expect proto objects returned will only have
-            # one of `name` or `id` set.
-            if element.HasField('name'):
-                element_dict['name'] = element.name
-
-            path.append(element_dict)
-
-        dataset_id = pb.partition_id.dataset_id or None
-        namespace = pb.partition_id.namespace
-
-        return cls(path, namespace, dataset_id)
-
     def to_protobuf(self):
         """Return a protobuf corresponding to the key.
diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py
index b8ca58b7c483..edfe00c7889d 100644
--- a/gcloud/datastore/query.py
+++ b/gcloud/datastore/query.py
@@ -3,8 +3,7 @@
 import base64

 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
-from gcloud.datastore import _helpers
-from gcloud.datastore.entity import Entity
+from gcloud.datastore import helpers
 from gcloud.datastore.key import Key
@@ -151,7 +150,7 @@ def filter(self, expression, value):
         property_filter.operator = operator

         # Set the value to filter on based on the type.
-        _helpers._set_protobuf_value(property_filter.value, value)
+        helpers._set_protobuf_value(property_filter.value, value)
         return clone

     def ancestor(self, ancestor):
@@ -343,7 +342,7 @@ def fetch(self, limit=None):
         entity_pbs, end_cursor = query_results[:2]

         self._cursor = end_cursor
-        return [Entity.from_protobuf(entity, dataset=self.dataset())
+        return [helpers.entity_from_protobuf(entity, dataset=self.dataset())
                 for entity in entity_pbs]

     def cursor(self):
diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py
index 4d3675aedee1..d4c135086eb7 100644
--- a/gcloud/datastore/test_entity.py
+++ b/gcloud/datastore/test_entity.py
@@ -76,48 +76,6 @@ def test_from_key_w_dataset(self):
         self.assertEqual(key.kind(), _KIND)
         self.assertEqual(key.id(), _ID)

-    def test_from_protobuf_wo_dataset(self):
-        from gcloud.datastore import datastore_v1_pb2 as datastore_pb
-
-        entity_pb = datastore_pb.Entity()
-        entity_pb.key.partition_id.dataset_id = _DATASET_ID
-        entity_pb.key.path_element.add(kind=_KIND, id=_ID)
-        entity_pb.key.partition_id.dataset_id = _DATASET_ID
-        prop_pb = entity_pb.property.add()
-        prop_pb.name = 'foo'
-        prop_pb.value.string_value = 'Foo'
-        klass = self._getTargetClass()
-        entity = klass.from_protobuf(entity_pb)
-        self.assertTrue(entity.dataset() is None)
-        self.assertEqual(entity.kind(), _KIND)
-        self.assertEqual(entity['foo'], 'Foo')
-        key = entity.key()
-        self.assertEqual(key._dataset_id, _DATASET_ID)
-        self.assertEqual(key.kind(), _KIND)
-        self.assertEqual(key.id(), _ID)
-
-    def test_from_protobuf_w_dataset(self):
-        from gcloud.datastore import datastore_v1_pb2 as datastore_pb
-        from gcloud.datastore.dataset import Dataset
-
-        entity_pb = datastore_pb.Entity()
-        entity_pb.key.partition_id.dataset_id = _DATASET_ID
-        entity_pb.key.path_element.add(kind=_KIND, id=_ID)
-        entity_pb.key.partition_id.dataset_id = _DATASET_ID
-        prop_pb = entity_pb.property.add()
-        prop_pb.name = 'foo'
-        prop_pb.value.string_value = 'Foo'
-        dataset = Dataset(_DATASET_ID)
-        klass = self._getTargetClass()
-        entity = klass.from_protobuf(entity_pb, dataset)
-        self.assertTrue(entity.dataset() is dataset)
-        self.assertEqual(entity.kind(), _KIND)
-        self.assertEqual(entity['foo'], 'Foo')
-        key = entity.key()
-        self.assertEqual(key._dataset_id, _DATASET_ID)
-        self.assertEqual(key.kind(), _KIND)
-        self.assertEqual(key.id(), _ID)
-
     def test__must_key_no_key(self):
         from gcloud.datastore.entity import NoKey
@@ -224,7 +182,7 @@ def test_save_w_returned_key(self):
         self.assertEqual(entity['foo'], 'Foo')
         self.assertEqual(connection._saved,
                          (_DATASET_ID, 'KEY', {'foo': 'Foo'}))
-        self.assertEqual(key._path, [{'kind': _KIND, 'id': _ID}])
+        self.assertEqual(key._path, [{'kind': _KIND, 'id': _ID, 'name': ''}])

     def test_delete_no_key(self):
         from gcloud.datastore.entity import NoKey
diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test_helpers.py
similarity index 70%
rename from gcloud/datastore/test__helpers.py
rename to gcloud/datastore/test_helpers.py
index 84ea41ea1ec0..c2c8aae45933 100644
--- a/gcloud/datastore/test__helpers.py
+++ b/gcloud/datastore/test_helpers.py
@@ -1,10 +1,125 @@
 import unittest2


+class Test_entity_from_protobuf(unittest2.TestCase):
+
+    _MARKER = object()
+
+    def _callFUT(self, val, dataset=_MARKER):
+        from gcloud.datastore.helpers import entity_from_protobuf
+
+        if dataset is self._MARKER:
+            return entity_from_protobuf(val)
+
+        return entity_from_protobuf(val, dataset)
+
+    def test_wo_dataset(self):
+        from gcloud.datastore import datastore_v1_pb2 as datastore_pb
+
+        _DATASET_ID = 'DATASET'
+        _KIND = 'KIND'
+        _ID = 1234
+        entity_pb = datastore_pb.Entity()
+        entity_pb.key.partition_id.dataset_id = _DATASET_ID
+        entity_pb.key.path_element.add(kind=_KIND, id=_ID)
+        entity_pb.key.partition_id.dataset_id = _DATASET_ID
+        prop_pb = entity_pb.property.add()
+        prop_pb.name = 'foo'
+        prop_pb.value.string_value = 'Foo'
+        entity = self._callFUT(entity_pb)
+        self.assertTrue(entity.dataset() is None)
+        self.assertEqual(entity.kind(), _KIND)
+        self.assertEqual(entity['foo'], 'Foo')
+        key = entity.key()
+        self.assertEqual(key._dataset_id, _DATASET_ID)
+        self.assertEqual(key.kind(), _KIND)
+        self.assertEqual(key.id(), _ID)
+
+    def test_w_dataset(self):
+        from gcloud.datastore import datastore_v1_pb2 as datastore_pb
+        from gcloud.datastore.dataset import Dataset
+
+        _DATASET_ID = 'DATASET'
+        _KIND = 'KIND'
+        _ID = 1234
+        entity_pb = datastore_pb.Entity()
+        entity_pb.key.partition_id.dataset_id = _DATASET_ID
+        entity_pb.key.path_element.add(kind=_KIND, id=_ID)
+        entity_pb.key.partition_id.dataset_id = _DATASET_ID
+        prop_pb = entity_pb.property.add()
+        prop_pb.name = 'foo'
+        prop_pb.value.string_value = 'Foo'
+        dataset = Dataset(_DATASET_ID)
+        entity = self._callFUT(entity_pb, dataset)
+        self.assertTrue(entity.dataset() is dataset)
+        self.assertEqual(entity.kind(), _KIND)
+        self.assertEqual(entity['foo'], 'Foo')
+        key = entity.key()
+        self.assertEqual(key._dataset_id, _DATASET_ID)
+        self.assertEqual(key.kind(), _KIND)
+        self.assertEqual(key.id(), _ID)
+
+
+class Test_key_from_protobuf(unittest2.TestCase):
+
+    def _callFUT(self, val):
+        from gcloud.datastore.helpers import key_from_protobuf
+
+        return key_from_protobuf(val)
+
+    def _makePB(self, dataset_id=None, namespace=None, path=()):
+        from gcloud.datastore.datastore_v1_pb2 import Key
+        pb = Key()
+        if dataset_id is not None:
+            pb.partition_id.dataset_id = dataset_id
+        if namespace is not None:
+            pb.partition_id.namespace = namespace
+        for elem in path:
+            added = pb.path_element.add()
+            added.kind = elem['kind']
+            if 'id' in elem:
+                added.id = elem['id']
+            if 'name' in elem:
+                added.name = elem['name']
+        return pb
+
+    def test_w_dataset_id_in_pb(self):
+        _DATASET = 'DATASET'
+        pb = self._makePB(_DATASET)
+        key = self._callFUT(pb)
+        self.assertEqual(key._dataset_id, _DATASET)
+
+    def test_w_namespace_in_pb(self):
+        _NAMESPACE = 'NAMESPACE'
+        pb = self._makePB(namespace=_NAMESPACE)
+        key = self._callFUT(pb)
+        self.assertEqual(key.namespace(), _NAMESPACE)
+
+    def test_w_path_in_pb(self):
+        _DATASET = 'DATASET'
+        _NAMESPACE = 'NAMESPACE'
+        pb = self._makePB(_DATASET, _NAMESPACE)
+        _PARENT = 'PARENT'
+        _CHILD = 'CHILD'
+        _GRANDCHILD = 'GRANDCHILD'
+        _ID = 1234
+        _ID2 = 5678
+        _NAME = 'NAME'
+        _NAME2 = 'NAME2'
+        _PATH = [
+            {'kind': _PARENT, 'name': _NAME},
+            {'kind': _CHILD, 'id': _ID},
+            {'kind': _GRANDCHILD, 'id': _ID2, 'name': _NAME2},
+        ]
+        pb = self._makePB(path=_PATH)
+        key = self._callFUT(pb)
+        self.assertEqual(key.path(), _PATH)
+
+
 class Test__get_protobuf_attribute_and_value(unittest2.TestCase):

     def _callFUT(self, val):
-        from gcloud.datastore._helpers import _get_protobuf_attribute_and_value
+        from gcloud.datastore.helpers import _get_protobuf_attribute_and_value

         return _get_protobuf_attribute_and_value(val)
@@ -107,7 +222,7 @@ def test_object(self):
 class Test__get_value_from_value_pb(unittest2.TestCase):

     def _callFUT(self, pb):
-        from gcloud.datastore._helpers import _get_value_from_value_pb
+        from gcloud.datastore.helpers import _get_value_from_value_pb

         return _get_value_from_value_pb(pb)
@@ -197,7 +312,7 @@ def test_unknown(self):
 class Test__get_value_from_property_pb(unittest2.TestCase):

     def _callFUT(self, pb):
-        from gcloud.datastore._helpers import _get_value_from_property_pb
+        from gcloud.datastore.helpers import _get_value_from_property_pb

         return _get_value_from_property_pb(pb)
@@ -212,7 +327,7 @@ def test_it(self):
 class Test_set_protobuf_value(unittest2.TestCase):

     def _callFUT(self, value_pb, val):
-        from gcloud.datastore._helpers import _set_protobuf_value
+        from gcloud.datastore.helpers import _set_protobuf_value

         return _set_protobuf_value(value_pb, val)
diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py
index a05e29de7646..1e92060311e3 100644
--- a/gcloud/datastore/test_key.py
+++ b/gcloud/datastore/test_key.py
@@ -10,22 +10,6 @@ def _getTargetClass(self):
     def _makeOne(self, path=None, namespace=None, dataset_id=None):
         return self._getTargetClass()(path, namespace, dataset_id)

-    def _makePB(self, dataset_id=None, namespace=None, path=()):
-        from gcloud.datastore.datastore_v1_pb2 import Key
-        pb = Key()
-        if dataset_id is not None:
-            pb.partition_id.dataset_id = dataset_id
-        if namespace is not None:
-            pb.partition_id.namespace = namespace
-        for elem in path:
-            added = pb.path_element.add()
-            added.kind = elem['kind']
-            if 'id' in elem:
-                added.id = elem['id']
-            if 'name' in elem:
-                added.name = elem['name']
-        return pb
-
     def test_ctor_defaults(self):
         key = self._makeOne()
         self.assertEqual(key._dataset_id, None)
@@ -58,38 +42,6 @@ def test__clone(self):
         self.assertEqual(clone.kind(), _KIND)
         self.assertEqual(clone.path(), _PATH)

-    def test_from_protobuf_w_dataset_id_in_pb(self):
-        _DATASET = 'DATASET'
-        pb = self._makePB(_DATASET)
-        key = self._getTargetClass().from_protobuf(pb)
-        self.assertEqual(key._dataset_id, _DATASET)
-
-    def test_from_protobuf_w_namespace_in_pb(self):
-        _NAMESPACE = 'NAMESPACE'
-        pb = self._makePB(namespace=_NAMESPACE)
-        key = self._getTargetClass().from_protobuf(pb)
-        self.assertEqual(key.namespace(), _NAMESPACE)
-
-    def test_from_protobuf_w_path_in_pb(self):
-        _DATASET = 'DATASET'
-        _NAMESPACE = 'NAMESPACE'
-        pb = self._makePB(_DATASET, _NAMESPACE)
-        _PARENT = 'PARENT'
-        _CHILD = 'CHILD'
-        _GRANDCHILD = 'GRANDCHILD'
-        _ID = 1234
-        _ID2 = 5678
-        _NAME = 'NAME'
-        _NAME2 = 'NAME2'
-        _PATH = [
-            {'kind': _PARENT, 'name': _NAME},
-            {'kind': _CHILD, 'id': _ID},
-            {'kind': _GRANDCHILD, 'id': _ID2, 'name': _NAME2},
-        ]
-        pb = self._makePB(path=_PATH)
-        key = self._getTargetClass().from_protobuf(pb)
-        self.assertEqual(key.path(), _PATH)
-
     def test_to_protobuf_defaults(self):
         from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB
         key = self._makeOne()
diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py
index d20f35a686d4..434c5fa31624 100644
--- a/gcloud/datastore/transaction.py
+++ b/gcloud/datastore/transaction.py
@@ -1,7 +1,7 @@
 """Create / interact with gcloud datastore transactions."""

 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
-from gcloud.datastore.key import Key
+from gcloud.datastore import helpers


 class Transaction(object):
@@ -236,7 +236,7 @@ def commit(self):
         # For any of the auto-id entities, make sure we update their keys.
         for i, entity in enumerate(self._auto_id_entities):
             key_pb = result.insert_auto_id_key[i]
-            key = Key.from_protobuf(key_pb)
+            key = helpers.key_from_protobuf(key_pb)
             entity.key(entity.key().path(key.path()))

         # Tell the connection that the transaction is over.
diff --git a/pylintrc_default b/pylintrc_default
index e2c61f529e52..38d7b97eb2de 100644
--- a/pylintrc_default
+++ b/pylintrc_default
@@ -24,7 +24,7 @@ ignore = datastore_v1_pb2.py
 [MESSAGES CONTROL]
 disable = I, protected-access, maybe-no-member, no-member,
     redefined-builtin, star-args, missing-format-attribute,
-    similarities, cyclic-import, arguments-differ,
+    similarities, arguments-differ,
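
As an illustrative aside (outside the patch itself): a minimal sketch of how the newly public helpers could be called once this change lands, modeled on the new tests in test_helpers.py. The dataset id, kind, and property values below are invented for the example and are not part of the change.

from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers

# Build an Entity protobuf the same way test_helpers.py does; the
# 'SOME-DATASET' / 'Person' / 'name' values are made up for illustration.
entity_pb = datastore_pb.Entity()
entity_pb.key.partition_id.dataset_id = 'SOME-DATASET'
entity_pb.key.path_element.add(kind='Person', id=1234)
prop_pb = entity_pb.property.add()
prop_pb.name = 'name'
prop_pb.value.string_value = 'Alice'

# key_from_protobuf() takes over from the removed Key.from_protobuf classmethod.
key = helpers.key_from_protobuf(entity_pb.key)
assert key.kind() == 'Person' and key.id() == 1234

# entity_from_protobuf() takes over from the removed Entity.from_protobuf classmethod.
entity = helpers.entity_from_protobuf(entity_pb)
assert entity['name'] == 'Alice'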