Skip to content

Commit

Permalink
Making Key fully immutable and requiring a dataset ID.
Browse files Browse the repository at this point in the history
  • Loading branch information
dhermes committed Dec 21, 2014
1 parent 0aec96c commit 7865b29
Show file tree
Hide file tree
Showing 19 changed files with 851 additions and 617 deletions.
17 changes: 9 additions & 8 deletions gcloud/datastore/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,8 +241,7 @@ def lookup(self, dataset_id, key_pbs,
if single_key:
key_pbs = [key_pbs]

for key_pb in key_pbs:
lookup_request.key.add().CopyFrom(key_pb)
helpers._add_keys_to_request(lookup_request.key, key_pbs)

results, missing_found, deferred_found = self._lookup(
lookup_request, dataset_id, deferred is not None)
Expand Down Expand Up @@ -417,8 +416,7 @@ def allocate_ids(self, dataset_id, key_pbs):
:returns: An equal number of keys, with IDs filled in by the backend.
"""
request = datastore_pb.AllocateIdsRequest()
for key_pb in key_pbs:
request.key.add().CopyFrom(key_pb)
helpers._add_keys_to_request(request.key, key_pbs)
# Nothing to do with this response, so just execute the method.
response = self._rpc(dataset_id, 'allocateIds', request,
datastore_pb.AllocateIdsResponse)
Expand All @@ -444,8 +442,14 @@ def save_entity(self, dataset_id, key_pb, properties,
:type exclude_from_indexes: sequence of str
:param exclude_from_indexes: Names of properties *not* to be indexed.
:rtype: bool or :class:`gcloud.datastore.datastore_v1_pb2.Key`
:returns: True if the save succeeds, unless a new ID has been
automatically allocated. In the auto ID case, the newly
created key protobuf is returned.
"""
mutation = self.mutation()
key_pb = helpers._prepare_key_for_request(key_pb)

# If the Key is complete, we should upsert
# instead of using insert_auto_id.
Expand Down Expand Up @@ -506,10 +510,7 @@ def delete_entities(self, dataset_id, key_pbs):
:returns: True
"""
mutation = self.mutation()

for key_pb in key_pbs:
delete = mutation.delete.add()
delete.CopyFrom(key_pb)
helpers._add_keys_to_request(mutation.delete, key_pbs)

if not self.transaction():
self.commit(dataset_id, mutation)
Expand Down
6 changes: 3 additions & 3 deletions gcloud/datastore/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def get_entity(self, key_or_path):
if isinstance(key_or_path, Key):
entities = self.get_entities([key_or_path])
else:
key = Key.from_path(*key_or_path)
key = Key(*key_or_path)
entities = self.get_entities([key])

if entities:
Expand Down Expand Up @@ -196,7 +196,7 @@ def allocate_ids(self, incomplete_key, num_ids):
:return: The (complete) keys allocated with `incomplete_key` as root.
:raises: `ValueError` if `incomplete_key` is not a partial key.
"""
if not incomplete_key.is_partial():
if not incomplete_key.is_partial:
raise ValueError(('Key is not partial.', incomplete_key))

incomplete_key_pb = incomplete_key.to_protobuf()
Expand All @@ -206,5 +206,5 @@ def allocate_ids(self, incomplete_key, num_ids):
self.id(), incomplete_key_pbs)
allocated_ids = [allocated_key_pb.path_element[-1].id
for allocated_key_pb in allocated_key_pbs]
return [incomplete_key.id(allocated_id)
return [incomplete_key.complete_key(allocated_id)
for allocated_id in allocated_ids]
28 changes: 10 additions & 18 deletions gcloud/datastore/entity.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,10 @@ def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
super(Entity, self).__init__(dataset=dataset)
self._data = {}
if kind:
self._key = Key().kind(kind)
# This is temporary since the dataset will eventually be 100%
# removed from the Entity and the Dataset class may be
# destroyed.
self._key = Key(kind, dataset_id=self.dataset().id())
else:
self._key = None
self._exclude_from_indexes = set(exclude_from_indexes)
Expand Down Expand Up @@ -193,7 +196,7 @@ def kind(self):
"""

if self._key:
return self._key.kind()
return self._key.kind

def exclude_from_indexes(self):
"""Names of fields which are *not* to be indexed for this entity.
Expand Down Expand Up @@ -284,29 +287,18 @@ def save(self):
key_pb = connection.save_entity(
dataset_id=dataset.id(),
key_pb=key.to_protobuf(),
properties=self._data,
properties=self.to_dict(),
exclude_from_indexes=self.exclude_from_indexes())

# If we are in a transaction and the current entity needs an
# automatically assigned ID, tell the transaction where to put that.
transaction = connection.transaction()
if transaction and key.is_partial():
if transaction and key.is_partial:
transaction.add_auto_id_entity(self)

if isinstance(key_pb, datastore_pb.Key):
# Update the path (which may have been altered).
# NOTE: The underlying namespace can't have changed in a save().
# The value of the dataset ID may have changed from implicit
# (i.e. None, with the ID implied from the dataset.Dataset
# object associated with the Entity/Key), but if it was
# implicit before the save() we leave it as implicit.
path = []
for element in key_pb.path_element:
key_part = {}
for descriptor, value in element._fields.items():
key_part[descriptor.name] = value
path.append(key_part)
self._key = key.path(path)
# Update the key (which may have been altered).
self._key = self.key().compare_to_proto(key_pb)

return self

Expand All @@ -327,6 +319,6 @@ def delete(self):

def __repr__(self):
if self._key:
return '<Entity%s %r>' % (self._key.path(), self._data)
return '<Entity%s %r>' % (self._key.path, self._data)
else:
return '<Entity %r>' % (self._data,)
56 changes: 47 additions & 9 deletions gcloud/datastore/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import pytz
import six

from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore.entity import Entity
from gcloud.datastore.key import Key

Expand Down Expand Up @@ -65,19 +66,15 @@ def key_from_protobuf(pb):
:rtype: :class:`gcloud.datastore.key.Key`
:returns: a new `Key` instance
"""
path = []
path_args = []
for element in pb.path_element:
element_dict = {'kind': element.kind}

path_args.append(element.kind)
if element.HasField('id'):
element_dict['id'] = element.id

path_args.append(element.id)
# This is safe: we expect proto objects returned will only have
# one of `name` or `id` set.
if element.HasField('name'):
element_dict['name'] = element.name

path.append(element_dict)
path_args.append(element.name)

dataset_id = None
if pb.partition_id.HasField('dataset_id'):
Expand All @@ -86,7 +83,7 @@ def key_from_protobuf(pb):
if pb.partition_id.HasField('namespace'):
namespace = pb.partition_id.namespace

return Key(path, namespace, dataset_id)
return Key(*path_args, namespace=namespace, dataset_id=dataset_id)


def _pb_attr_value(val):
Expand Down Expand Up @@ -263,3 +260,44 @@ def _set_protobuf_value(value_pb, val):
_set_protobuf_value(i_pb, item)
else: # scalar, just assign
setattr(value_pb, attr, val)


def _prepare_key_for_request(key_pb):
    """Strip an un-prefixed dataset ID from a key protobuf, if present.

    The backend rejects a request when the key's ``partition_id`` carries
    a dataset ID, because requests to
    ``/datastore/.../datasets/foo/...`` and
    ``/datastore/.../datasets/s~foo/...`` both route to the datastore
    named ``s~foo``; a key protobuf with ``dataset_id='foo'`` then fails
    the backend's equality check since ``'foo' != 's~foo'``. Removing the
    field lets the backend infer the dataset from the request URL.

    :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
    :param key_pb: A key to be added to a request.

    :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key`
    :returns: A copy of ``key_pb`` with ``dataset_id`` cleared, or the
              original (unmodified) protobuf when no ``dataset_id`` was
              set.
    """
    if key_pb.partition_id.HasField('dataset_id'):
        # Copy first: the caller's protobuf must not be mutated.
        new_key_pb = datastore_pb.Key()
        new_key_pb.CopyFrom(key_pb)
        new_key_pb.partition_id.ClearField('dataset_id')
        key_pb = new_key_pb
    return key_pb


def _add_keys_to_request(request_field_pb, key_pbs):
    """Copy each key protobuf into a repeated request field.

    Every key is first passed through ``_prepare_key_for_request`` so it
    is safe to send to the backend, then appended to the repeated field.

    :type request_field_pb: `RepeatedCompositeFieldContainer`
    :param request_field_pb: A repeated proto field that contains keys.

    :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
    :param key_pbs: The keys to add to a request.
    """
    for original_pb in key_pbs:
        prepared_pb = _prepare_key_for_request(original_pb)
        new_element = request_field_pb.add()
        new_element.CopyFrom(prepared_pb)
Loading

0 comments on commit 7865b29

Please sign in to comment.