
Commit 48d2015

Make dataset_id required on Key.
Addresses the seventh part of #451. This also requires removing the dataset_id from generated protobufs, due to googleapis/google-cloud-datastore#59; the same stripping applies to keys used in __key__ filters and ancestor queries.
Parent: e86ba09

File tree: 14 files changed (+234 / -86 lines)
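For context, a minimal sketch of the user-visible change (the dataset ID string below is a placeholder; the ValueError message is the one added in gcloud/datastore/key.py):

```python
from gcloud.datastore.key import Key

# dataset_id is now required and keyword-only.
key = Key('Person', 1234, dataset_id='my-dataset-id')

# Without an explicit dataset_id, the Key falls back to
# _implicit_environ.DATASET; if that is also unset, construction fails.
try:
    Key('Person', 1234)
except ValueError as exc:
    print(exc)  # "A Key must have a dataset ID set."
```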

gcloud/datastore/connection.py

Lines changed: 4 additions & 8 deletions
@@ -241,8 +241,7 @@ def lookup(self, dataset_id, key_pbs,
         if single_key:
             key_pbs = [key_pbs]
 
-        for key_pb in key_pbs:
-            lookup_request.key.add().CopyFrom(key_pb)
+        helpers._add_keys_to_request(lookup_request.key, key_pbs)
 
         results, missing_found, deferred_found = self._lookup(
             lookup_request, dataset_id, deferred is not None)
@@ -417,8 +416,7 @@ def allocate_ids(self, dataset_id, key_pbs):
         :returns: An equal number of keys, with IDs filled in by the backend.
         """
         request = datastore_pb.AllocateIdsRequest()
-        for key_pb in key_pbs:
-            request.key.add().CopyFrom(key_pb)
+        helpers._add_keys_to_request(request.key, key_pbs)
         # Nothing to do with this response, so just execute the method.
         response = self._rpc(dataset_id, 'allocateIds', request,
                              datastore_pb.AllocateIdsResponse)
@@ -451,6 +449,7 @@ def save_entity(self, dataset_id, key_pb, properties,
             either `None` or an integer that has been assigned.
         """
         mutation = self.mutation()
+        key_pb = helpers._prepare_key_for_request(key_pb)
 
         # If the Key is complete, we should upsert
         # instead of using insert_auto_id.
@@ -515,10 +514,7 @@ def delete_entities(self, dataset_id, key_pbs):
         :returns: True
         """
         mutation = self.mutation()
-
-        for key_pb in key_pbs:
-            delete = mutation.delete.add()
-            delete.CopyFrom(key_pb)
+        helpers._add_keys_to_request(mutation.delete, key_pbs)
 
         if not self.transaction():
             self.commit(dataset_id, mutation)
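All three call sites above now funnel through the same helper. A rough sketch of that shared pattern, assuming the LookupRequest message and the proto fields referenced in this diff (the kind, id, and dataset ID values are placeholders):

```python
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers

key_pb = datastore_pb.Key()
key_pb.partition_id.dataset_id = 'my-dataset-id'
path = key_pb.path_element.add()
path.kind = 'Person'
path.id = 1234

# One call replaces the per-method CopyFrom loops; it also strips the
# dataset_id from each key before it lands in the request body.
lookup_request = datastore_pb.LookupRequest()
helpers._add_keys_to_request(lookup_request.key, [key_pb])
```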

gcloud/datastore/entity.py

Lines changed: 1 addition & 1 deletion
@@ -99,7 +99,7 @@ def __init__(self, dataset=None, kind=None, exclude_from_indexes=()):
         # _implicit_environ._DatastoreBase to avoid split MRO.
         self._dataset = dataset or _implicit_environ.DATASET
         if kind:
-            self._key = Key(kind)
+            self._key = Key(kind, dataset_id=self.dataset().id())
         else:
             self._key = None
         self._exclude_from_indexes = set(exclude_from_indexes)

gcloud/datastore/helpers.py

Lines changed: 42 additions & 0 deletions
@@ -25,6 +25,7 @@
 import pytz
 import six
 
+from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 from gcloud.datastore.entity import Entity
 from gcloud.datastore.key import Key
 
@@ -259,3 +260,44 @@ def _set_protobuf_value(value_pb, val):
             _set_protobuf_value(i_pb, item)
     else:  # scalar, just assign
         setattr(value_pb, attr, val)
+
+
+def _prepare_key_for_request(key_pb):
+    """Strip the dataset ID from a key protobuf before a request.
+
+    :type key_pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param key_pb: A key to be added to a request.
+
+    :rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :returns: A key which will be added to a request. It will be the
+              original if nothing needs to be changed.
+    """
+    if key_pb.partition_id.HasField('dataset_id'):
+        # We remove the dataset_id from the protobuf because the backend
+        # fails a request if the key contains an un-prefixed dataset ID.
+        # Requests to
+        #     /datastore/.../datasets/foo/...
+        # and
+        #     /datastore/.../datasets/s~foo/...
+        # both go to the datastore given by 's~foo', so if the key
+        # protobuf in the request body has dataset_id='foo', the
+        # backend rejects it since 'foo' != 's~foo'.
+        new_key_pb = datastore_pb.Key()
+        new_key_pb.CopyFrom(key_pb)
+        new_key_pb.partition_id.ClearField('dataset_id')
+        key_pb = new_key_pb
+    return key_pb
+
+
+def _add_keys_to_request(request_field_pb, key_pbs):
+    """Add protobuf keys to a request object.
+
+    :type request_field_pb: `RepeatedCompositeFieldContainer`
+    :param request_field_pb: A repeated proto field that contains keys.
+
+    :type key_pbs: list of :class:`gcloud.datastore.datastore_v1_pb2.Key`
+    :param key_pbs: The keys to add to a request.
+    """
+    for key_pb in key_pbs:
+        key_pb = _prepare_key_for_request(key_pb)
+        request_field_pb.add().CopyFrom(key_pb)
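A short sketch of what the new helper does to a key protobuf, again assuming only the fields referenced in this diff (the kind, name, and dataset ID values are placeholders):

```python
from gcloud.datastore import datastore_v1_pb2 as datastore_pb
from gcloud.datastore import helpers

key_pb = datastore_pb.Key()
key_pb.partition_id.dataset_id = 'foo'  # un-prefixed ID the backend would reject
element = key_pb.path_element.add()
element.kind = 'Person'
element.name = 'alice'

prepared = helpers._prepare_key_for_request(key_pb)

# The prepared copy carries no dataset_id; the caller's protobuf is untouched.
assert not prepared.partition_id.HasField('dataset_id')
assert key_pb.partition_id.dataset_id == 'foo'
```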

gcloud/datastore/key.py

Lines changed: 21 additions & 16 deletions
@@ -18,6 +18,7 @@
 from itertools import izip
 import six
 
+from gcloud.datastore import _implicit_environ
 from gcloud.datastore import datastore_v1_pb2 as datastore_pb
 
 
@@ -56,24 +57,30 @@ def __init__(self, *path_args, **kwargs):
                           passed as a keyword argument.
 
         :type dataset_id: string
-        :param dataset_id: The dataset ID associated with the key. Can only be
-                           passed as a keyword argument.
-
-        # This note will be obsolete by the end of #451.
-
-        .. note::
-          The key's ``_dataset_id`` field must be None for keys created
-          by application code. The
-          :func:`gcloud.datastore.helpers.key_from_protobuf` factory
-          will set the field to an appropriate value for keys
-          returned from the datastore backend. The application
-          **must** treat any value set by the back-end as opaque.
+        :param dataset_id: The dataset ID associated with the key. This is
+                           required. Can only be passed as a keyword argument.
         """
         self._path = self._parse_path(path_args)
         self._flat_path = path_args
         self._parent = None
         self._namespace = kwargs.get('namespace')
         self._dataset_id = kwargs.get('dataset_id')
+        self._validate_dataset_id()
+
+    def _validate_dataset_id(self):
+        """Ensures the dataset ID is set.
+
+        If unset, attempts to imply the ID from the environment.
+
+        :raises: `ValueError` if there is no `dataset_id` and none
+                 can be implied.
+        """
+        if self._dataset_id is None:
+            if _implicit_environ.DATASET is not None:
+                # This assumes DATASET.id() is not None.
+                self._dataset_id = _implicit_environ.DATASET.id()
+            else:
+                raise ValueError('A Key must have a dataset ID set.')
 
     @staticmethod
     def _parse_path(path_args):
@@ -160,9 +167,7 @@ def to_protobuf(self):
         :returns: The Protobuf representing the key.
         """
         key = datastore_pb.Key()
-
-        if self.dataset_id is not None:
-            key.partition_id.dataset_id = self.dataset_id
+        key.partition_id.dataset_id = self.dataset_id
 
         if self.namespace:
             key.partition_id.namespace = self.namespace
@@ -297,4 +302,4 @@ def parent(self):
         return self._parent
 
     def __repr__(self):
-        return '<Key%s>' % self.path
+        return '<Key%s, dataset=%s>' % (self.path, self.dataset_id)
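A sketch of the new Key behavior beyond the required constructor argument, assuming the application has already populated _implicit_environ.DATASET:

```python
from gcloud.datastore import _implicit_environ
from gcloud.datastore.key import Key

if _implicit_environ.DATASET is not None:
    # No explicit dataset_id: _validate_dataset_id() borrows the implicit
    # dataset's ID instead of raising ValueError.
    key = Key('Person', 1234)

    # to_protobuf() now always sets partition_id.dataset_id (it is stripped
    # again by helpers._prepare_key_for_request before a request goes out),
    # and repr() includes the dataset for easier debugging.
    print(key.to_protobuf().partition_id.dataset_id)
    print(repr(key))  # <Key[...], dataset=...>
```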

gcloud/datastore/query.py

Lines changed: 10 additions & 2 deletions
@@ -163,7 +163,14 @@ def filter(self, property_name, operator, value):
         property_filter.operator = pb_op_enum
 
         # Set the value to filter on based on the type.
-        helpers._set_protobuf_value(property_filter.value, value)
+        if property_name == '__key__':
+            if not isinstance(value, Key):
+                raise TypeError('__key__ query requires a Key instance.')
+            key_pb = value.to_protobuf()
+            property_filter.value.key_value.CopyFrom(
+                helpers._prepare_key_for_request(key_pb))
+        else:
+            helpers._set_protobuf_value(property_filter.value, value)
         return clone
 
     def ancestor(self, ancestor):
@@ -216,7 +223,8 @@ def ancestor(self, ancestor):
         ancestor_filter = composite_filter.filter.add().property_filter
         ancestor_filter.property.name = '__key__'
         ancestor_filter.operator = datastore_pb.PropertyFilter.HAS_ANCESTOR
-        ancestor_filter.value.key_value.CopyFrom(ancestor.to_protobuf())
+        ancestor_pb = helpers._prepare_key_for_request(ancestor.to_protobuf())
+        ancestor_filter.value.key_value.CopyFrom(ancestor_pb)
 
         return clone
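A sketch of the query-side effect, assuming a Dataset object is already in hand and that dataset.query() builds a Query bound to it (that factory is an assumption, not part of this diff):

```python
from gcloud.datastore.key import Key

def key_query_examples(dataset):
    """Sketch only: `dataset` is an existing gcloud.datastore Dataset."""
    parent = Key('Person', 1234, dataset_id=dataset.id())

    # __key__ filters now require a Key; its dataset_id is stripped via
    # helpers._prepare_key_for_request before serialization.
    by_key = dataset.query('Person').filter('__key__', '=', parent)

    # Ancestor queries get the same stripping.
    descendants = dataset.query('Person').ancestor(parent)

    # A non-Key value is rejected:
    #   dataset.query('Person').filter('__key__', '=', 1234)  -> TypeError
    return by_key, descendants
```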

gcloud/datastore/test_connection.py

Lines changed: 29 additions & 17 deletions
@@ -235,7 +235,7 @@ def test_lookup_single_key_empty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
 
     def test_lookup_single_key_empty_response_w_eventual(self):
         from gcloud.datastore.connection import datastore_pb
@@ -261,7 +261,7 @@ def test_lookup_single_key_empty_response_w_eventual(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
         self.assertEqual(request.read_options.read_consistency,
                          datastore_pb.ReadOptions.EVENTUAL)
         self.assertEqual(request.read_options.transaction, '')
@@ -301,7 +301,7 @@ def test_lookup_single_key_empty_response_w_transaction(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
         self.assertEqual(request.read_options.transaction, TRANSACTION)
 
     def test_lookup_single_key_nonempty_response(self):
@@ -333,7 +333,7 @@ def test_lookup_single_key_nonempty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 1)
-        self.assertEqual(keys[0], key_pb)
+        _compare_key_pb_after_request(self, key_pb, keys[0])
 
     def test_lookup_multiple_keys_empty_response(self):
         from gcloud.datastore.connection import datastore_pb
@@ -360,8 +360,8 @@ def test_lookup_multiple_keys_empty_response(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_missing(self):
         from gcloud.datastore.connection import datastore_pb
@@ -396,8 +396,8 @@ def test_lookup_multiple_keys_w_missing(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_missing_non_empty(self):
         DATASET_ID = 'DATASET'
@@ -444,8 +444,8 @@ def test_lookup_multiple_keys_w_deferred(self):
         request.ParseFromString(cw['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
     def test_lookup_multiple_keys_w_deferred_non_empty(self):
         DATASET_ID = 'DATASET'
@@ -500,8 +500,8 @@ def test_lookup_multiple_keys_w_deferred_from_backend_but_not_passed(self):
         request.ParseFromString(cw[0]['body'])
         keys = list(request.key)
         self.assertEqual(len(keys), 2)
-        self.assertEqual(keys[0], key_pb1)
-        self.assertEqual(keys[1], key_pb2)
+        _compare_key_pb_after_request(self, key_pb1, keys[0])
+        _compare_key_pb_after_request(self, key_pb2, keys[1])
 
         self._verifyProtobufCall(cw[1], URI, conn)
         request.ParseFromString(cw[1]['body'])
@@ -907,7 +907,9 @@ def test_allocate_ids_non_empty(self):
         rq_class = datastore_pb.AllocateIdsRequest
         request = rq_class()
         request.ParseFromString(cw['body'])
-        self.assertEqual(list(request.key), before_key_pbs)
+        self.assertEqual(len(request.key), len(before_key_pbs))
+        for key_before, key_after in zip(before_key_pbs, request.key):
+            _compare_key_pb_after_request(self, key_before, key_after)
 
     def test_save_entity_wo_transaction_w_upsert(self):
         from gcloud.datastore.connection import datastore_pb
@@ -938,7 +940,7 @@ def test_save_entity_wo_transaction_w_upsert(self):
         upserts = list(mutation.upsert)
         self.assertEqual(len(upserts), 1)
         upsert = upserts[0]
-        self.assertEqual(upsert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, upsert.key)
         props = list(upsert.property)
         self.assertEqual(len(props), 1)
         self.assertEqual(props[0].name, 'foo')
@@ -979,7 +981,7 @@ def test_save_entity_w_exclude_from_indexes(self):
         upserts = list(mutation.upsert)
         self.assertEqual(len(upserts), 1)
         upsert = upserts[0]
-        self.assertEqual(upsert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, upsert.key)
         props = sorted(upsert.property,
                        key=operator.attrgetter('name'),
                        reverse=True)
@@ -1028,7 +1030,7 @@ def test_save_entity_wo_transaction_w_auto_id(self):
         mutation = request.mutation
         inserts = list(mutation.insert_auto_id)
         insert = inserts[0]
-        self.assertEqual(insert.key, key_pb)
+        _compare_key_pb_after_request(self, key_pb, insert.key)
         props = list(insert.property)
         self.assertEqual(len(props), 1)
         self.assertEqual(props[0].name, 'foo')
@@ -1112,7 +1114,7 @@ def test_delete_entities_wo_transaction(self):
         deletes = list(mutation.delete)
         self.assertEqual(len(deletes), 1)
         delete = deletes[0]
-        self.assertEqual(delete, key_pb)
+        _compare_key_pb_after_request(self, key_pb, delete)
         self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
 
     def test_delete_entities_w_transaction(self):
@@ -1168,3 +1170,13 @@ def __init__(self, id):
 
     def id(self):
         return self._id
+
+
+def _compare_key_pb_after_request(test, key_before, key_after):
+    test.assertFalse(key_after.partition_id.HasField('dataset_id'))
+    test.assertEqual(key_before.partition_id.namespace,
+                     key_after.partition_id.namespace)
+    test.assertEqual(len(key_before.path_element),
+                     len(key_after.path_element))
+    for elt1, elt2 in zip(key_before.path_element, key_after.path_element):
+        test.assertEqual(elt1, elt2)

gcloud/datastore/test_dataset.py

Lines changed: 9 additions & 0 deletions
@@ -109,6 +109,15 @@ def test_get_entity_hit(self):
         self.assertEqual(list(result), ['foo'])
         self.assertEqual(result['foo'], 'Foo')
 
+    def test_get_entity_bad_type(self):
+        DATASET_ID = 'DATASET'
+        connection = _Connection()
+        dataset = self._makeOne(DATASET_ID, connection)
+        with self.assertRaises(AttributeError):
+            dataset.get_entity(None)
+        with self.assertRaises(AttributeError):
+            dataset.get_entity([])
+
     def test_get_entities_miss(self):
         from gcloud.datastore.key import Key
         DATASET_ID = 'DATASET'

gcloud/datastore/test_entity.py

Lines changed: 2 additions & 2 deletions
@@ -61,7 +61,7 @@ def test_key_getter(self):
         entity = self._makeOne()
         key = entity.key()
         self.assertIsInstance(key, Key)
-        self.assertEqual(key.dataset_id, None)
+        self.assertEqual(key.dataset_id, entity.dataset().id())
         self.assertEqual(key.kind, _KIND)
 
     def test_key_setter(self):
@@ -196,7 +196,7 @@ def test_save_w_returned_key_exclude_from_indexes(self):
         connection = _Connection()
         connection._save_result = (True, _ID)
         dataset = _Dataset(connection)
-        key = Key('KIND', dataset_id='DATASET')
+        key = Key('KIND', dataset_id=_DATASET_ID)
         entity = self._makeOne(dataset, exclude_from_indexes=['foo'])
         entity.key(key)
         entity['foo'] = 'Foo'
