From c6166718b698a3a6c39e925aaedb69987a0e0487 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 22 Dec 2014 18:27:38 -0800 Subject: [PATCH 1/2] Make Key constructor easier to use by taking positional args. Addresses fourth part of #451. --- gcloud/datastore/entity.py | 2 +- gcloud/datastore/helpers.py | 14 +-- gcloud/datastore/key.py | 116 ++++++++++++++++++----- gcloud/datastore/query.py | 2 +- gcloud/datastore/test___init__.py | 2 +- gcloud/datastore/test_connection.py | 91 ++++++++---------- gcloud/datastore/test_dataset.py | 27 +++--- gcloud/datastore/test_entity.py | 7 +- gcloud/datastore/test_helpers.py | 53 ++++------- gcloud/datastore/test_key.py | 137 ++++++++++++---------------- gcloud/datastore/test_query.py | 8 +- regression/datastore.py | 16 ++-- regression/populate_datastore.py | 32 +++---- 13 files changed, 252 insertions(+), 255 deletions(-) diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 65dc5e66f99d..31b41e1cb8eb 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -100,7 +100,7 @@ def __init__(self, dataset=None, kind=None, exclude_from_indexes=()): # _implicit_environ._DatastoreBase to avoid split MRO. 
self._dataset = dataset or _implicit_environ.DATASET if kind: - self._key = Key(path=[{'kind': kind}]) + self._key = Key(kind) else: self._key = None self._exclude_from_indexes = set(exclude_from_indexes) diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py index 54c8ab8855f0..2231f5b5030c 100644 --- a/gcloud/datastore/helpers.py +++ b/gcloud/datastore/helpers.py @@ -65,19 +65,15 @@ def key_from_protobuf(pb): :rtype: :class:`gcloud.datastore.key.Key` :returns: a new `Key` instance """ - path = [] + path_args = [] for element in pb.path_element: - element_dict = {'kind': element.kind} - + path_args.append(element.kind) if element.HasField('id'): - element_dict['id'] = element.id - + path_args.append(element.id) # This is safe: we expect proto objects returned will only have # one of `name` or `id` set. if element.HasField('name'): - element_dict['name'] = element.name - - path.append(element_dict) + path_args.append(element.name) dataset_id = None if pb.partition_id.HasField('dataset_id'): @@ -86,7 +82,7 @@ def key_from_protobuf(pb): if pb.partition_id.HasField('namespace'): namespace = pb.partition_id.namespace - return Key(path, namespace, dataset_id) + return Key(*path_args, namespace=namespace, dataset_id=dataset_id) def _pb_attr_value(val): diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index ea970dba3cd4..3de5ffa8bb45 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -15,6 +15,8 @@ """Create / interact with gcloud datastore keys.""" import copy +from itertools import izip +import six from gcloud.datastore import datastore_v1_pb2 as datastore_pb @@ -22,21 +24,42 @@ class Key(object): """An immutable representation of a datastore Key. + To create a basic key: + + >>> Key('EntityKind', 1234) + + >>> Key('EntityKind', 'foo') + + + To create a key with a parent: + + >>> Key('Parent', 'foo', 'Child', 1234) + + + To create a partial key: + + >>> Key('Parent', 'foo', 'Child') + + ..
automethod:: __init__ """ - def __init__(self, path=None, namespace=None, dataset_id=None): + def __init__(self, *path_args, **kwargs): """Constructor / initializer for a key. - :type namespace: :class:`str` - :param namespace: A namespace identifier for the key. + :type path_args: tuple of strings and ints + :param path_args: May represent a partial (odd length) or full (even + length) key path. - :type path: sequence of dicts - :param path: Each dict must have keys 'kind' (a string) and optionally - 'name' (a string) or 'id' (an integer). + :type namespace: :class:`str` + :param namespace: A namespace identifier for the key. Can only be + passed as a keyword argument. :type dataset_id: string - :param dataset: The dataset ID assigned by back-end for the key. + :param dataset_id: The dataset ID associated with the key. Can only be + passed as a keyword argument. + + # This note will be obsolete by the end of #451. .. note:: The key's ``_dataset_id`` field must be None for keys created @@ -46,10 +69,50 @@ def __init__(self, path=None, namespace=None, dataset_id=None): returned from the datastore backend. The application **must** treat any value set by the back-end as opaque. """ - self._path = path or [{'kind': ''}] + self._path = self._parse_path(path_args) + self._flat_path = path_args self._parent = None - self._namespace = namespace - self._dataset_id = dataset_id + self._namespace = kwargs.get('namespace') + self._dataset_id = kwargs.get('dataset_id') + + @staticmethod + def _parse_path(path_args): + """Parses positional arguments into key path with kinds and IDs. + + :rtype: list of dict + :returns: A list of key parts with kind and id or name set. + :raises: `ValueError` if there are no `path_args`, if one of the + kinds is not a string or if one of the IDs/names is not + a string or an integer. 
+ """ + if len(path_args) == 0: + raise ValueError('Key path must not be empty.') + + kind_list = path_args[::2] + id_or_name_list = path_args[1::2] + if len(path_args) % 2 == 1: + # Add dummy None to be ignored below. + id_or_name_list += (None,) + + result = [] + for kind, id_or_name in izip(kind_list, id_or_name_list): + curr_key_part = {} + if isinstance(kind, six.string_types): + curr_key_part['kind'] = kind + else: + raise ValueError(kind, 'Kind was not a string.') + + if isinstance(id_or_name, six.string_types): + curr_key_part['name'] = id_or_name + elif isinstance(id_or_name, six.integer_types): + curr_key_part['id'] = id_or_name + elif id_or_name is not None: + raise ValueError(id_or_name, + 'ID/name was not a string or integer.') + + result.append(curr_key_part) + + return result def _clone(self): """Duplicates the Key. @@ -74,8 +137,8 @@ def to_protobuf(self): if self.dataset_id is not None: key.partition_id.dataset_id = self.dataset_id - if self._namespace: - key.partition_id.namespace = self._namespace + if self.namespace: + key.partition_id.namespace = self.namespace for item in self.path: element = key.path_element.add() @@ -118,6 +181,15 @@ def path(self): """ return copy.deepcopy(self._path) + @property + def flat_path(self): + """Getter for the key path as a tuple. + + :rtype: :class:`tuple` of string and int + :returns: The tuple of elements in the path. + """ + return self._flat_path + @property def kind(self): """Kind getter. Based on the last element of path. @@ -125,8 +197,7 @@ def kind(self): :rtype: :class:`str` :returns: The kind of the current key. """ - if self.path: - return self.path[-1].get('kind') + return self.path[-1]['kind'] @property def id(self): @@ -135,8 +206,7 @@ def id(self): :rtype: :class:`int` :returns: The (integer) ID of the key. 
""" - if self.path: - return self.path[-1].get('id') + return self.path[-1].get('id') @property def name(self): @@ -145,8 +215,7 @@ def name(self): :rtype: :class:`str` :returns: The (string) name of the key. """ - if self.path: - return self.path[-1].get('name') + return self.path[-1].get('name') @property def id_or_name(self): @@ -178,14 +247,17 @@ def _make_parent(self): element of self's path. If self has only one path element, returns None. """ - parent_path = self.path[:-1] - if parent_path: - return Key(path=parent_path, dataset_id=self.dataset_id, + if self.is_partial: + parent_args = self.flat_path[:-1] + else: + parent_args = self.flat_path[:-2] + if parent_args: + return Key(*parent_args, dataset_id=self.dataset_id, namespace=self.namespace) @property def parent(self): - """Getter: return a new key for the next highest element in path. + """The parent of the current key. :rtype: :class:`gcloud.datastore.key.Key` or `NoneType` :returns: a new `Key` instance, whose path consists of all but the last diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index 8e7f442928fd..5fc52f689dc2 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -172,7 +172,7 @@ def ancestor(self, ancestor): This will return a clone of the current :class:`Query` filtered by the ancestor provided. 
For example:: - >>> parent_key = Key(path=[{'kind': 'Person', 'name': '1'}]) + >>> parent_key = Key('Person', '1') >>> query = dataset.query('Person') >>> filtered_query = query.ancestor(parent_key) diff --git a/gcloud/datastore/test___init__.py b/gcloud/datastore/test___init__.py index 3f6a28e9ec78..603eee1543c2 100644 --- a/gcloud/datastore/test___init__.py +++ b/gcloud/datastore/test___init__.py @@ -167,9 +167,9 @@ def test_allocate_ids(self): from gcloud._testing import _Monkey CUSTOM_DATASET = _Dataset() - INCOMPLETE_KEY = Key() NUM_IDS = 2 with _Monkey(_implicit_environ, DATASET=CUSTOM_DATASET): + INCOMPLETE_KEY = Key('KIND') result = gcloud.datastore.allocate_ids(INCOMPLETE_KEY, NUM_IDS) # Check the IDs returned. diff --git a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py index 5a5e8e085bbc..857cc3132b3d 100644 --- a/gcloud/datastore/test_connection.py +++ b/gcloud/datastore/test_connection.py @@ -22,6 +22,13 @@ def _getTargetClass(self): return Connection + def _make_key_pb(self, dataset_id, id=1234): + from gcloud.datastore.key import Key + path_args = ('Kind',) + if id is not None: + path_args += (id,) + return Key(*path_args, dataset_id=dataset_id).to_protobuf() + def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) @@ -206,10 +213,9 @@ def test_dataset(self): def test_lookup_single_key_empty_response(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.LookupResponse() conn = self._makeOne() URI = '/'.join([ @@ -233,10 +239,9 @@ def test_lookup_single_key_empty_response(self): def test_lookup_single_key_empty_response_w_eventual(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 
1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.LookupResponse() conn = self._makeOne() URI = '/'.join([ @@ -262,11 +267,9 @@ def test_lookup_single_key_empty_response_w_eventual(self): self.assertEqual(request.read_options.transaction, '') def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): - from gcloud.datastore.key import Key - DATASET_ID = 'DATASET' TRANSACTION = 'TRANSACTION' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) conn = self._makeOne() conn.transaction(Transaction(TRANSACTION)) self.assertRaises( @@ -274,11 +277,10 @@ def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): def test_lookup_single_key_empty_response_w_transaction(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' TRANSACTION = 'TRANSACTION' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.LookupResponse() conn = self._makeOne() conn.transaction(Transaction(TRANSACTION)) @@ -304,10 +306,9 @@ def test_lookup_single_key_empty_response_w_transaction(self): def test_lookup_single_key_nonempty_response(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.LookupResponse() entity = datastore_pb.Entity() entity.key.CopyFrom(key_pb) @@ -336,11 +337,10 @@ def test_lookup_single_key_nonempty_response(self): def test_lookup_multiple_keys_empty_response(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() - key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf() + key_pb1 = 
self._make_key_pb(DATASET_ID) + key_pb2 = self._make_key_pb(DATASET_ID, id=2345) rsp_pb = datastore_pb.LookupResponse() conn = self._makeOne() URI = '/'.join([ @@ -365,11 +365,10 @@ def test_lookup_multiple_keys_empty_response(self): def test_lookup_multiple_keys_w_missing(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() - key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf() + key_pb1 = self._make_key_pb(DATASET_ID) + key_pb2 = self._make_key_pb(DATASET_ID, id=2345) rsp_pb = datastore_pb.LookupResponse() er_1 = rsp_pb.missing.add() er_1.entity.key.CopyFrom(key_pb1) @@ -401,10 +400,9 @@ def test_lookup_multiple_keys_w_missing(self): self.assertEqual(keys[1], key_pb2) def test_lookup_multiple_keys_w_missing_non_empty(self): - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() - key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf() + key_pb1 = self._make_key_pb(DATASET_ID) + key_pb2 = self._make_key_pb(DATASET_ID, id=2345) conn = self._makeOne() missing = ['this', 'list', 'is', 'not', 'empty'] self.assertRaises( @@ -413,11 +411,10 @@ def test_lookup_multiple_keys_w_missing_non_empty(self): def test_lookup_multiple_keys_w_deferred(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() - key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf() + key_pb1 = self._make_key_pb(DATASET_ID) + key_pb2 = self._make_key_pb(DATASET_ID, id=2345) rsp_pb = datastore_pb.LookupResponse() rsp_pb.deferred.add().CopyFrom(key_pb1) rsp_pb.deferred.add().CopyFrom(key_pb2) @@ -451,10 +448,9 @@ def test_lookup_multiple_keys_w_deferred(self): self.assertEqual(keys[1], key_pb2) def 
test_lookup_multiple_keys_w_deferred_non_empty(self): - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() - key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf() + key_pb1 = self._make_key_pb(DATASET_ID) + key_pb2 = self._make_key_pb(DATASET_ID, id=2345) conn = self._makeOne() deferred = ['this', 'list', 'is', 'not', 'empty'] self.assertRaises( @@ -463,11 +459,10 @@ def test_lookup_multiple_keys_w_deferred_non_empty(self): def test_lookup_multiple_keys_w_deferred_from_backend_but_not_passed(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb1 = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() - key_pb2 = Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf() + key_pb1 = self._make_key_pb(DATASET_ID) + key_pb2 = self._make_key_pb(DATASET_ID, id=2345) rsp_pb1 = datastore_pb.LookupResponse() entity1 = datastore_pb.Entity() entity1.key.CopyFrom(key_pb1) @@ -740,10 +735,9 @@ def test_begin_transaction_explicit_serialize(self): def test_commit_wo_transaction(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() mutation = datastore_pb.Mutation() insert = mutation.upsert.add() @@ -775,13 +769,12 @@ def test_commit_wo_transaction(self): def test_commit_w_transaction(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key class Xact(object): def id(self): return 'xact' DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() mutation = datastore_pb.Mutation() insert = mutation.upsert.add() @@ -884,16 +877,15 @@ def 
test_allocate_ids_empty(self): def test_allocate_ids_non_empty(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' before_key_pbs = [ - Key(path=[{'kind': 'Kind'}]).to_protobuf(), - Key(path=[{'kind': 'Kind'}]).to_protobuf(), + self._make_key_pb(DATASET_ID, id=None), + self._make_key_pb(DATASET_ID, id=None), ] after_key_pbs = [ - Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf(), - Key(path=[{'kind': 'Kind', 'id': 2345}]).to_protobuf(), + self._make_key_pb(DATASET_ID), + self._make_key_pb(DATASET_ID, id=2345), ] rsp_pb = datastore_pb.AllocateIdsResponse() rsp_pb.key.add().CopyFrom(after_key_pbs[0]) @@ -919,10 +911,9 @@ def test_allocate_ids_non_empty(self): def test_save_entity_wo_transaction_w_upsert(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() URI = '/'.join([ @@ -958,11 +949,10 @@ def test_save_entity_wo_transaction_w_upsert(self): def test_save_entity_w_exclude_from_indexes(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key import operator DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() URI = '/'.join([ @@ -1008,11 +998,10 @@ def test_save_entity_w_exclude_from_indexes(self): def test_save_entity_wo_transaction_w_auto_id(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind'}]).to_protobuf() - updated_key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID, id=None) + updated_key_pb = 
self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() mr_pb = rsp_pb.mutation_result mr_pb.index_updates = 0 @@ -1052,7 +1041,6 @@ def test_save_entity_wo_transaction_w_auto_id(self): def test_save_entity_w_transaction(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key mutation = datastore_pb.Mutation() @@ -1060,7 +1048,7 @@ class Xact(object): def mutation(self): return mutation DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() conn.transaction(Xact()) @@ -1074,7 +1062,6 @@ def mutation(self): def test_save_entity_w_transaction_nested_entity(self): from gcloud.datastore.connection import datastore_pb from gcloud.datastore.entity import Entity - from gcloud.datastore.key import Key mutation = datastore_pb.Mutation() @@ -1084,7 +1071,7 @@ def mutation(self): DATASET_ID = 'DATASET' nested = Entity() nested['bar'] = u'Bar' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() conn.transaction(Xact()) @@ -1097,10 +1084,9 @@ def mutation(self): def test_delete_entities_wo_transaction(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() URI = '/'.join([ @@ -1131,7 +1117,6 @@ def test_delete_entities_wo_transaction(self): def test_delete_entities_w_transaction(self): from gcloud.datastore.connection import datastore_pb - from gcloud.datastore.key import Key mutation = datastore_pb.Mutation() @@ -1139,7 +1124,7 @@ class Xact(object): def mutation(self): return mutation DATASET_ID = 'DATASET' - key_pb = Key(path=[{'kind': 
'Kind', 'id': 1234}]).to_protobuf() + key_pb = self._make_key_pb(DATASET_ID) rsp_pb = datastore_pb.CommitResponse() conn = self._makeOne() conn.transaction(Xact()) diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py index 87268efb39e4..d8b83183a72e 100644 --- a/gcloud/datastore/test_dataset.py +++ b/gcloud/datastore/test_dataset.py @@ -81,7 +81,7 @@ def test_get_entity_miss(self): DATASET_ID = 'DATASET' connection = _Connection() dataset = self._makeOne(DATASET_ID, connection) - key = Key(path=[{'kind': 'Kind', 'id': 1234}]) + key = Key('Kind', 1234, dataset_id=DATASET_ID) self.assertEqual(dataset.get_entity(key), None) def test_get_entity_hit(self): @@ -101,7 +101,7 @@ def test_get_entity_hit(self): prop.value.string_value = 'Foo' connection = _Connection(entity_pb) dataset = self._makeOne(DATASET_ID, connection) - key = Key(path=PATH) + key = Key(KIND, ID, dataset_id=DATASET_ID) result = dataset.get_entity(key) key = result.key() self.assertEqual(key.dataset_id, DATASET_ID) @@ -114,7 +114,7 @@ def test_get_entities_miss(self): DATASET_ID = 'DATASET' connection = _Connection() dataset = self._makeOne(DATASET_ID, connection) - key = Key(path=[{'kind': 'Kind', 'id': 1234}]) + key = Key('Kind', 1234, dataset_id=DATASET_ID) self.assertEqual(dataset.get_entities([key]), []) def test_get_entities_miss_w_missing(self): @@ -123,7 +123,6 @@ def test_get_entities_miss_w_missing(self): DATASET_ID = 'DATASET' KIND = 'Kind' ID = 1234 - PATH = [{'kind': KIND, 'id': ID}] missed = datastore_pb.Entity() missed.key.partition_id.dataset_id = DATASET_ID path_element = missed.key.path_element.add() @@ -132,7 +131,7 @@ def test_get_entities_miss_w_missing(self): connection = _Connection() connection._missing = [missed] dataset = self._makeOne(DATASET_ID, connection) - key = Key(path=PATH, dataset_id=DATASET_ID) + key = Key(KIND, ID, dataset_id=DATASET_ID) missing = [] entities = dataset.get_entities([key], missing=missing) self.assertEqual(entities, []) @@ 
-142,12 +141,9 @@ def test_get_entities_miss_w_missing(self): def test_get_entities_miss_w_deferred(self): from gcloud.datastore.key import Key DATASET_ID = 'DATASET' - KIND = 'Kind' - ID = 1234 - PATH = [{'kind': KIND, 'id': ID}] connection = _Connection() dataset = self._makeOne(DATASET_ID, connection) - key = Key(path=PATH, dataset_id=DATASET_ID) + key = Key('Kind', 1234, dataset_id=DATASET_ID) connection._deferred = [key.to_protobuf()] deferred = [] entities = dataset.get_entities([key], deferred=deferred) @@ -172,21 +168,22 @@ def test_get_entities_hit(self): prop.value.string_value = 'Foo' connection = _Connection(entity_pb) dataset = self._makeOne(DATASET_ID, connection) - key = Key(path=PATH) + key = Key(KIND, ID, dataset_id=DATASET_ID) result, = dataset.get_entities([key]) - key = result.key() - self.assertEqual(key.dataset_id, DATASET_ID) - self.assertEqual(key.path, PATH) + new_key = result.key() + self.assertFalse(new_key is key) + self.assertEqual(new_key.dataset_id, DATASET_ID) + self.assertEqual(new_key.path, PATH) self.assertEqual(list(result), ['foo']) self.assertEqual(result['foo'], 'Foo') def test_allocate_ids(self): from gcloud.datastore.key import Key - INCOMPLETE_KEY = Key(path=[{'kind': 'foo'}]) + DATASET_ID = 'DATASET' + INCOMPLETE_KEY = Key('KIND', dataset_id=DATASET_ID) CONNECTION = _Connection() NUM_IDS = 2 - DATASET_ID = 'foo' DATASET = self._makeOne(DATASET_ID, connection=CONNECTION) result = DATASET.allocate_ids(INCOMPLETE_KEY, NUM_IDS) diff --git a/gcloud/datastore/test_entity.py b/gcloud/datastore/test_entity.py index ee6241e31c13..97971ca7ce4e 100644 --- a/gcloud/datastore/test_entity.py +++ b/gcloud/datastore/test_entity.py @@ -74,7 +74,7 @@ def test_from_key_wo_dataset(self): from gcloud.datastore.key import Key klass = self._getTargetClass() - key = Key(path=[{'kind': _KIND, 'id': _ID}]) + key = Key(_KIND, _ID, dataset_id='DATASET') entity = klass.from_key(key) self.assertTrue(entity.dataset() is None) 
self.assertEqual(entity.kind(), _KIND) @@ -88,7 +88,7 @@ def test_from_key_w_dataset(self): klass = self._getTargetClass() dataset = Dataset(_DATASET_ID) - key = Key(path=[{'kind': _KIND, 'id': _ID}]) + key = Key(_KIND, _ID, dataset_id=_DATASET_ID) entity = klass.from_key(key, dataset) self.assertTrue(entity.dataset() is dataset) self.assertEqual(entity.kind(), _KIND) @@ -196,8 +196,7 @@ def test_save_w_returned_key_exclude_from_indexes(self): connection = _Connection() connection._save_result = key_pb dataset = _Dataset(connection) - key = Key() - # key_pb_before = key.to_protobuf() + key = Key('KIND', dataset_id='DATASET') entity = self._makeOne(dataset, exclude_from_indexes=['foo']) entity.key(key) entity['foo'] = 'Foo' diff --git a/gcloud/datastore/test_helpers.py b/gcloud/datastore/test_helpers.py index 951e5fba7ae9..dcf36d796cc7 100644 --- a/gcloud/datastore/test_helpers.py +++ b/gcloud/datastore/test_helpers.py @@ -90,11 +90,10 @@ def _callFUT(self, val): return key_from_protobuf(val) - def _makePB(self, dataset_id=None, namespace=None, path=()): + def _makePB(self, dataset_id, namespace=None, path=()): from gcloud.datastore.datastore_v1_pb2 import Key pb = Key() - if dataset_id is not None: - pb.partition_id.dataset_id = dataset_id + pb.partition_id.dataset_id = dataset_id if namespace is not None: pb.partition_id.namespace = namespace for elem in path: @@ -108,35 +107,27 @@ def _makePB(self, dataset_id=None, namespace=None, path=()): def test_w_dataset_id_in_pb(self): _DATASET = 'DATASET' - pb = self._makePB(_DATASET) + pb = self._makePB(path=[{'kind': 'KIND'}], dataset_id=_DATASET) key = self._callFUT(pb) self.assertEqual(key.dataset_id, _DATASET) self.assertEqual(key.namespace, None) def test_w_namespace_in_pb(self): + _DATASET = 'DATASET' _NAMESPACE = 'NAMESPACE' - pb = self._makePB(namespace=_NAMESPACE) + pb = self._makePB(path=[{'kind': 'KIND'}], namespace=_NAMESPACE, + dataset_id=_DATASET) key = self._callFUT(pb) - self.assertEqual(key.dataset_id, 
None) + self.assertEqual(key.dataset_id, _DATASET) self.assertEqual(key.namespace, _NAMESPACE) def test_w_path_in_pb(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - pb = self._makePB(_DATASET, _NAMESPACE) - _PARENT = 'PARENT' - _CHILD = 'CHILD' - _GRANDCHILD = 'GRANDCHILD' - _ID = 1234 - _ID2 = 5678 - _NAME = 'NAME' - _NAME2 = 'NAME2' _PATH = [ - {'kind': _PARENT, 'name': _NAME}, - {'kind': _CHILD, 'id': _ID}, - {'kind': _GRANDCHILD, 'id': _ID2, 'name': _NAME2}, + {'kind': 'PARENT', 'name': 'NAME'}, + {'kind': 'CHILD', 'id': 1234}, + {'kind': 'GRANDCHILD', 'id': 5678}, ] - pb = self._makePB(path=_PATH) + pb = self._makePB(path=_PATH, dataset_id='DATASET') key = self._callFUT(pb) self.assertEqual(key.path, _PATH) @@ -174,11 +165,7 @@ def test_datetime_w_zone(self): def test_key(self): from gcloud.datastore.key import Key - _DATASET = 'DATASET' - _KIND = 'KIND' - _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] - key = Key(dataset_id=_DATASET, path=_PATH) + key = Key('PATH', 1234, dataset_id='DATASET') name, value = self._callFUT(key) self.assertEqual(name, 'key_value') self.assertEqual(value, key.to_protobuf()) @@ -272,12 +259,8 @@ def test_key(self): from gcloud.datastore.datastore_v1_pb2 import Value from gcloud.datastore.key import Key - _DATASET = 'DATASET' - _KIND = 'KIND' - _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] pb = Value() - expected = Key(dataset_id=_DATASET, path=_PATH).to_protobuf() + expected = Key('KIND', 1234, dataset_id='DATASET').to_protobuf() pb.key_value.CopyFrom(expected) found = self._callFUT(pb) self.assertEqual(found.to_protobuf(), expected) @@ -308,6 +291,8 @@ def test_entity(self): pb = Value() entity_pb = pb.entity_value + entity_pb.key.path_element.add(kind='KIND') + entity_pb.key.partition_id.dataset_id = 'DATASET' prop_pb = entity_pb.property.add() prop_pb.name = 'foo' prop_pb.value.string_value = 'Foo' @@ -376,12 +361,8 @@ def test_datetime(self): def test_key(self): from gcloud.datastore.key import Key - _DATASET 
= 'DATASET' - _KIND = 'KIND' - _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] pb = self._makePB() - key = Key(dataset_id=_DATASET, path=_PATH) + key = Key('KIND', 1234, dataset_id='DATASET') self._callFUT(pb, key) value = pb.key_value self.assertEqual(value, key.to_protobuf()) @@ -464,7 +445,7 @@ def test_entity_w_key(self): from gcloud.datastore.key import Key pb = self._makePB() - key = Key(path=[{'kind': 'KIND', 'id': 123}]) + key = Key('KIND', 123, dataset_id='DATASET') entity = Entity().key(key) entity['foo'] = u'Foo' self._callFUT(pb, entity) diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index 46e01f397511..af0d56278a3d 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -21,15 +21,11 @@ def _getTargetClass(self): from gcloud.datastore.key import Key return Key - def _makeOne(self, path=None, namespace=None, dataset_id=None): - return self._getTargetClass()(path, namespace, dataset_id) + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) - def test_ctor_defaults(self): - key = self._makeOne() - self.assertEqual(key.dataset_id, None) - self.assertEqual(key.namespace, None) - self.assertEqual(key.kind, '') - self.assertEqual(key.path, [{'kind': ''}]) + def test_ctor_empty(self): + self.assertRaises(ValueError, self._makeOne) def test_ctor_explicit(self): _DATASET = 'DATASET' @@ -37,19 +33,27 @@ def test_ctor_explicit(self): _KIND = 'KIND' _ID = 1234 _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(_PATH, _NAMESPACE, _DATASET) + key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + dataset_id=_DATASET) self.assertEqual(key.dataset_id, _DATASET) self.assertEqual(key.namespace, _NAMESPACE) self.assertEqual(key.kind, _KIND) self.assertEqual(key.path, _PATH) + def test_ctor_bad_kind(self): + self.assertRaises(ValueError, self._makeOne, object()) + + def test_ctor_bad_id_or_name(self): + self.assertRaises(ValueError, self._makeOne, 'KIND', object()) + def 
test__clone(self): _DATASET = 'DATASET' _NAMESPACE = 'NAMESPACE' _KIND = 'KIND' _ID = 1234 _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(_PATH, _NAMESPACE, _DATASET) + key = self._makeOne(_KIND, _ID, namespace=_NAMESPACE, + dataset_id=_DATASET) clone = key._clone() self.assertEqual(clone.dataset_id, _DATASET) self.assertEqual(clone.namespace, _NAMESPACE) @@ -58,25 +62,34 @@ def test__clone(self): def test_to_protobuf_defaults(self): from gcloud.datastore.datastore_v1_pb2 import Key as KeyPB - key = self._makeOne() + _KIND = 'KIND' + key = self._makeOne(_KIND) pb = key.to_protobuf() self.assertTrue(isinstance(pb, KeyPB)) + + # Check partition ID. self.assertEqual(pb.partition_id.dataset_id, '') + self.assertFalse(pb.partition_id.HasField('dataset_id')) self.assertEqual(pb.partition_id.namespace, '') + self.assertFalse(pb.partition_id.HasField('namespace')) + + # Check the element PB matches the partial key and kind. elem, = list(pb.path_element) - self.assertEqual(elem.kind, '') + self.assertEqual(elem.kind, _KIND) self.assertEqual(elem.name, '') + self.assertFalse(elem.HasField('name')) self.assertEqual(elem.id, 0) + self.assertFalse(elem.HasField('id')) - def test_to_protobuf_w_explicit_dataset_no_prefix(self): + def test_to_protobuf_w_explicit_dataset(self): _DATASET = 'DATASET' - key = self._makeOne(dataset_id=_DATASET) + key = self._makeOne('KIND', dataset_id=_DATASET) pb = key.to_protobuf() self.assertEqual(pb.partition_id.dataset_id, _DATASET) def test_to_protobuf_w_explicit_namespace(self): _NAMESPACE = 'NAMESPACE' - key = self._makeOne(namespace=_NAMESPACE) + key = self._makeOne('KIND', namespace=_NAMESPACE) pb = key.to_protobuf() self.assertEqual(pb.partition_id.namespace, _NAMESPACE) @@ -85,115 +98,77 @@ def test_to_protobuf_w_explicit_path(self): _CHILD = 'CHILD' _ID = 1234 _NAME = 'NAME' - _PATH = [ - {'kind': _PARENT, 'name': _NAME}, - {'kind': _CHILD, 'id': _ID}, - {}, - ] - key = self._makeOne(path=_PATH) + key = self._makeOne(_PARENT, 
_NAME, _CHILD, _ID) pb = key.to_protobuf() elems = list(pb.path_element) - self.assertEqual(len(elems), len(_PATH)) + self.assertEqual(len(elems), 2) self.assertEqual(elems[0].kind, _PARENT) self.assertEqual(elems[0].name, _NAME) self.assertEqual(elems[1].kind, _CHILD) self.assertEqual(elems[1].id, _ID) - self.assertEqual(elems[2].kind, '') - self.assertEqual(elems[2].name, '') - self.assertEqual(elems[2].id, 0) + + def test_to_protobuf_w_no_kind(self): + key = self._makeOne('KIND') + # Force the 'kind' to be unset. Maybe `to_protobuf` should fail + # on this? The backend certainly will. + key._path[-1].pop('kind') + pb = key.to_protobuf() + self.assertEqual(pb.partition_id.dataset_id, '') + self.assertFalse(pb.path_element[0].HasField('kind')) def test_is_partial_no_name_or_id(self): - key = self._makeOne() + key = self._makeOne('KIND') self.assertTrue(key.is_partial) def test_is_partial_w_id(self): - _KIND = 'KIND' _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(path=_PATH) + key = self._makeOne('KIND', _ID) self.assertFalse(key.is_partial) def test_is_partial_w_name(self): - _KIND = 'KIND' _NAME = 'NAME' - _PATH = [{'kind': _KIND, 'name': _NAME}] - key = self._makeOne(path=_PATH) + key = self._makeOne('KIND', _NAME) self.assertFalse(key.is_partial) - def test_kind_getter_empty_path(self): - _DATASET = 'DATASET' - _NAMESPACE = 'NAMESPACE' - key = self._makeOne(namespace=_NAMESPACE, dataset_id=_DATASET) - key._path = () # edge case - self.assertEqual(key.kind, None) - - def test_id_getter_empty_path(self): - key = self._makeOne() - key._path = () # edge case - self.assertEqual(key.id, None) - - def test_name_getter_empty_path(self): - key = self._makeOne() - key._path = () # edge case - self.assertEqual(key.name, None) - def test_id_or_name_no_name_or_id(self): - key = self._makeOne() + key = self._makeOne('KIND') self.assertEqual(key.id_or_name, None) def test_id_or_name_no_name_or_id_child(self): - _KIND = 'KIND' - _NAME = 'NAME' - _ID = 
5678 - _PATH = [{'kind': _KIND, 'id': _ID, 'name': _NAME}, {'kind': ''}] - key = self._makeOne(path=_PATH) + key = self._makeOne('KIND1', 1234, 'KIND2') self.assertEqual(key.id_or_name, None) def test_id_or_name_w_id_only(self): - _KIND = 'KIND' - _ID = 1234 - _PATH = [{'kind': _KIND, 'id': _ID}] - key = self._makeOne(path=_PATH) - self.assertEqual(key.id_or_name, _ID) - - def test_id_or_name_w_id_and_name(self): - _KIND = 'KIND' _ID = 1234 - _NAME = 'NAME' - _PATH = [{'kind': _KIND, 'id': _ID, 'name': _NAME}] - key = self._makeOne(path=_PATH) + key = self._makeOne('KIND', _ID) self.assertEqual(key.id_or_name, _ID) def test_id_or_name_w_name_only(self): - _KIND = 'KIND' _NAME = 'NAME' - _PATH = [{'kind': _KIND, 'name': _NAME}] - key = self._makeOne(path=_PATH) + key = self._makeOne('KIND', _NAME) self.assertEqual(key.id_or_name, _NAME) def test_parent_default(self): - key = self._makeOne() + key = self._makeOne('KIND') self.assertEqual(key.parent, None) def test_parent_explicit_top_level(self): - key = self._makeOne(path=[{'kind': 'abc', 'name': 'def'}]) + key = self._makeOne('KIND', 1234) self.assertEqual(key.parent, None) def test_parent_explicit_nested(self): - parent_part = {'kind': 'abc', 'name': 'def'} - key = self._makeOne(path=[ - parent_part, - {'kind': 'ghi', 'id': 123}, - ]) - self.assertEqual(key.parent.path, [parent_part]) + _PARENT_KIND = 'KIND1' + _PARENT_ID = 1234 + _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') + self.assertEqual(key.parent.path, _PARENT_PATH) def test_parent_multiple_calls(self): _PARENT_KIND = 'KIND1' _PARENT_ID = 1234 - _PARENT_PATH = {'kind': _PARENT_KIND, 'id': _PARENT_ID} - _PATH = [_PARENT_PATH, {'kind': 'KIND2'}] - key = self._makeOne(path=_PATH) + _PARENT_PATH = [{'kind': _PARENT_KIND, 'id': _PARENT_ID}] + key = self._makeOne(_PARENT_KIND, _PARENT_ID, 'KIND2') parent = key.parent - self.assertEqual(parent.path, [_PARENT_PATH]) + 
self.assertEqual(parent.path, _PARENT_PATH) new_parent = key.parent self.assertTrue(parent is new_parent) diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index d30e77bc50fd..2bcacef3e93a 100644 --- a/gcloud/datastore/test_query.py +++ b/gcloud/datastore/test_query.py @@ -178,7 +178,7 @@ def test_ancestor_wo_existing_ancestor_query_w_key_and_propfilter(self): _KIND = 'KIND' _ID = 123 _NAME = u'NAME' - key = Key(path=[{'kind': _KIND, 'id': _ID}]) + key = Key(_KIND, _ID) query = self._makeOne().filter('name', '=', _NAME) after = query.ancestor(key) self.assertFalse(after is query) @@ -197,7 +197,7 @@ def test_ancestor_wo_existing_ancestor_query_w_key(self): from gcloud.datastore.key import Key _KIND = 'KIND' _ID = 123 - key = Key(path=[{'kind': _KIND, 'id': _ID}]) + key = Key(_KIND, _ID) query = self._makeOne() after = query.ancestor(key) self.assertFalse(after is query) @@ -213,7 +213,7 @@ def test_ancestor_clears_existing_ancestor_query_w_only(self): from gcloud.datastore.key import Key _KIND = 'KIND' _ID = 123 - key = Key(path=[{'kind': _KIND, 'id': _ID}]) + key = Key(_KIND, _ID) query = self._makeOne() between = query.ancestor(key) after = between.ancestor(None) @@ -227,7 +227,7 @@ def test_ancestor_clears_existing_ancestor_query_w_others(self): _KIND = 'KIND' _ID = 123 _NAME = u'NAME' - key = Key(path=[{'kind': _KIND, 'id': _ID}]) + key = Key(_KIND, _ID) query = self._makeOne().filter('name', '=', _NAME) between = query.ancestor(key) after = between.ancestor(None) diff --git a/regression/datastore.py b/regression/datastore.py index 5ee860639a60..f2ba27d19af8 100644 --- a/regression/datastore.py +++ b/regression/datastore.py @@ -41,7 +41,7 @@ def tearDown(self): class TestDatastoreAllocateIDs(TestDatastore): def test_allocate_ids(self): - incomplete_key = datastore.key.Key(path=[{'kind': 'Kind'}]) + incomplete_key = datastore.key.Key('Kind') num_ids = 10 allocated_keys = datastore.allocate_ids(incomplete_key, num_ids) 
self.assertEqual(len(allocated_keys), num_ids) @@ -74,9 +74,9 @@ def _get_post(self, name=None, key_id=None, post_content=None): # Update the entity key. key = None if name is not None: - key = datastore.key.Key(path=[{'kind': 'Post', 'name': name}]) + key = datastore.key.Key('Post', name) if key_id is not None: - key = datastore.key.Key(path=[{'kind': 'Post', 'id': key_id}]) + key = datastore.key.Key('Post', key_id) if key is not None: entity.key(key) @@ -146,7 +146,7 @@ def test_empty_kind(self): class TestDatastoreSaveKeys(TestDatastore): def test_save_key_self_reference(self): - key = datastore.key.Key(path=[{'kind': 'Person', 'name': 'name'}]) + key = datastore.key.Key('Person', 'name') entity = datastore.entity.Entity(kind=None).key(key) entity['fullName'] = u'Full name' entity['linkedTo'] = key # Self reference. @@ -172,8 +172,7 @@ class TestDatastoreQuery(TestDatastore): def setUpClass(cls): super(TestDatastoreQuery, cls).setUpClass() cls.CHARACTERS = populate_datastore.CHARACTERS - cls.ANCESTOR_KEY = datastore.key.Key( - path=[populate_datastore.ANCESTOR]) + cls.ANCESTOR_KEY = datastore.key.Key(*populate_datastore.ANCESTOR) def _base_query(self): return datastore.query.Query(kind='Character').ancestor( @@ -220,8 +219,7 @@ def test_ancestor_query(self): self.assertEqual(len(entities), expected_matches) def test_query___key___filter(self): - rickard_key = datastore.key.Key( - path=[populate_datastore.ANCESTOR, populate_datastore.RICKARD]) + rickard_key = datastore.key.Key(*populate_datastore.RICKARD) query = self._base_query().filter('__key__', '=', rickard_key) expected_matches = 1 @@ -337,7 +335,7 @@ def test_query_group_by(self): class TestDatastoreTransaction(TestDatastore): def test_transaction(self): - key = datastore.key.Key(path=[{'kind': 'Company', 'name': 'Google'}]) + key = datastore.key.Key('Company', 'Google') entity = datastore.entity.Entity(kind=None).key(key) entity['url'] = u'www.google.com' diff --git a/regression/populate_datastore.py 
b/regression/populate_datastore.py index 62ed9053d945..cb5298ff5f17 100644 --- a/regression/populate_datastore.py +++ b/regression/populate_datastore.py @@ -22,24 +22,18 @@ from regression import regression_utils -ANCESTOR = {'kind': 'Book', 'name': 'GoT'} -RICKARD = {'kind': 'Character', 'name': 'Rickard'} -EDDARD = {'kind': 'Character', 'name': 'Eddard'} +ANCESTOR = ('Book', 'GoT') +RICKARD = ANCESTOR + ('Character', 'Rickard') +EDDARD = RICKARD + ('Character', 'Eddard') KEY_PATHS = [ - [ANCESTOR, RICKARD], - [ANCESTOR, RICKARD, EDDARD], - [ANCESTOR, - {'kind': 'Character', 'name': 'Catelyn'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Arya'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Sansa'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Robb'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Bran'}], - [ANCESTOR, RICKARD, EDDARD, - {'kind': 'Character', 'name': 'Jon Snow'}], + RICKARD, + EDDARD, + ANCESTOR + ('Character', 'Catelyn'), + EDDARD + ('Character', 'Arya'), + EDDARD + ('Character', 'Sansa'), + EDDARD + ('Character', 'Robb'), + EDDARD + ('Character', 'Bran'), + EDDARD + ('Character', 'Jon Snow'), ] CHARACTERS = [ { @@ -90,10 +84,10 @@ def add_characters(): dataset = regression_utils.get_dataset() with dataset.transaction(): for key_path, character in zip(KEY_PATHS, CHARACTERS): - if key_path[-1]['name'] != character['name']: + if key_path[-1] != character['name']: raise ValueError(('Character and key don\'t agree', key_path, character)) - key = datastore.key.Key(path=key_path) + key = datastore.key.Key(*key_path) entity = datastore.entity.Entity(dataset=dataset).key(key) entity.update(character) entity.save() From 676b20c2122f72d6ca70d52ca2ac61f306f004ac Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 30 Dec 2014 11:05:06 -0800 Subject: [PATCH 2/2] Fixing false positive key paths containing None. 
--- gcloud/datastore/key.py | 7 ++++--- gcloud/datastore/test_key.py | 2 ++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index 3de5ffa8bb45..c731142fb212 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -90,9 +90,10 @@ def _parse_path(path_args): kind_list = path_args[::2] id_or_name_list = path_args[1::2] + # Dummy sentinel value to pad incomplete key to even length path. + partial_ending = object() if len(path_args) % 2 == 1: - # Add dummy None to be ignored below. - id_or_name_list += (None,) + id_or_name_list += (partial_ending,) result = [] for kind, id_or_name in izip(kind_list, id_or_name_list): @@ -106,7 +107,7 @@ def _parse_path(path_args): curr_key_part['name'] = id_or_name elif isinstance(id_or_name, six.integer_types): curr_key_part['id'] = id_or_name - elif id_or_name is not None: + elif id_or_name is not partial_ending: raise ValueError(id_or_name, 'ID/name was not a string or integer.') diff --git a/gcloud/datastore/test_key.py b/gcloud/datastore/test_key.py index af0d56278a3d..08b79cfa5042 100644 --- a/gcloud/datastore/test_key.py +++ b/gcloud/datastore/test_key.py @@ -45,6 +45,8 @@ def test_ctor_bad_kind(self): def test_ctor_bad_id_or_name(self): self.assertRaises(ValueError, self._makeOne, 'KIND', object()) + self.assertRaises(ValueError, self._makeOne, 'KIND', None) + self.assertRaises(ValueError, self._makeOne, 'KIND', 10, 'KIND2', None) def test__clone(self): _DATASET = 'DATASET'