diff --git a/gcloud/datastore/__init__.py b/gcloud/datastore/__init__.py index fee5a98b47b9..5391092caba8 100644 --- a/gcloud/datastore/__init__.py +++ b/gcloud/datastore/__init__.py @@ -7,8 +7,10 @@ ... 'long-email@googleapis.com', ... '/path/to/private.key') >>> # Then do other things... ->>> query = dataset.query().kind('EntityKind') ->>> entity = dataset.entity('EntityKind') +>>> from gcloud.datastore.entity import Entity +>>> from gcloud.datastore.query import Query +>>> query = Query(dataset, 'EntityKind') +>>> entity = Entity(dataset, 'EntityKind') The main concepts with this API are: @@ -32,6 +34,14 @@ which represents a lookup or search over the rows in the datastore. """ +# import submodules which register factories. +import gcloud.datastore.connection +import gcloud.datastore.dataset +import gcloud.datastore.entity +import gcloud.datastore.key +import gcloud.datastore.query +import gcloud.datastore.transaction + __version__ = '0.1.2' SCOPE = ('https://www.googleapis.com/auth/datastore ', @@ -80,7 +90,9 @@ def get_dataset(dataset_id, client_email, private_key_path): >>> from gcloud import datastore >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> # Now you can do things with the dataset. - >>> dataset.query().kind('TestKind').fetch() + >>> from gcloud.datastore.query import Query + >>> query = Query(dataset, 'TestKind') + >>> query.fetch() [...] 
:type dataset_id: string diff --git a/gcloud/datastore/_helpers.py b/gcloud/datastore/_helpers.py index 1ebba5377457..453290e9b063 100644 --- a/gcloud/datastore/_helpers.py +++ b/gcloud/datastore/_helpers.py @@ -8,12 +8,53 @@ from google.protobuf.internal.type_checkers import Int64ValueChecker import pytz -from gcloud.datastore.entity import Entity -from gcloud.datastore.key import Key INT_VALUE_CHECKER = Int64ValueChecker() +class DuplicateFactory(Exception): + """Conflicting factory registration.""" + + +class InvalidFactory(Exception): + """Unknown factory invocation.""" + + +class _FactoryRegistry(object): + """Single registry for named factories. + + This registry provides an API for modules to instantiate objects from + other modules without needing to import them directly, allowing us + to break circular import chains. + """ + + _MARKER = object() + + def __init__(self): + self._registered = {} + + def register(self, name, factory): + """Register a factory by name.""" + if self._registered.get(name) not in (None, factory): + raise DuplicateFactory(name) + self._registered[name] = factory + + def get(self, name, default=_MARKER): + """Look up a factory by name.""" + found = self._registered.get(name, default) + if found is self._MARKER: + raise InvalidFactory(name) + return found + + def invoke(self, name, *args, **kw): + """Look up and call a factory by name.""" + return self.get(name)(*args, **kw) + + +_FACTORIES = _FactoryRegistry() # singleton +del _FactoryRegistry + + def _get_protobuf_attribute_and_value(val): """Given a value, return the protobuf attribute name and proper value. @@ -47,6 +88,8 @@ def _get_protobuf_attribute_and_value(val): :returns: A tuple of the attribute name and proper value type. 
""" + key_class = _FACTORIES.get('Key') + entity_class = _FACTORIES.get('Entity') if isinstance(val, datetime.datetime): name = 'timestamp_microseconds' @@ -58,8 +101,6 @@ def _get_protobuf_attribute_and_value(val): val = val.astimezone(pytz.utc) # Convert the datetime to a microsecond timestamp. value = long(calendar.timegm(val.timetuple()) * 1e6) + val.microsecond - elif isinstance(val, Key): - name, value = 'key', val.to_protobuf() elif isinstance(val, bool): name, value = 'boolean', val elif isinstance(val, float): @@ -71,10 +112,12 @@ def _get_protobuf_attribute_and_value(val): name, value = 'string', val elif isinstance(val, (bytes, str)): name, value = 'blob', val - elif isinstance(val, Entity): - name, value = 'entity', val elif isinstance(val, list): name, value = 'list', val + elif key_class and isinstance(val, key_class): + name, value = 'key', val.to_protobuf() + elif entity_class and isinstance(val, entity_class): + name, value = 'entity', val else: raise ValueError("Unknown protobuf attr type %s" % type(val)) @@ -105,7 +148,7 @@ def _get_value_from_value_pb(value_pb): result = naive.replace(tzinfo=pytz.utc) elif value_pb.HasField('key_value'): - result = Key.from_protobuf(value_pb.key_value) + result = _FACTORIES.invoke('Key_from_protobuf', value_pb.key_value) elif value_pb.HasField('boolean_value'): result = value_pb.boolean_value @@ -123,7 +166,8 @@ def _get_value_from_value_pb(value_pb): result = value_pb.blob_value elif value_pb.HasField('entity_value'): - result = Entity.from_protobuf(value_pb.entity_value) + result = _FACTORIES.invoke( + 'Entity_from_protobuf', value_pb.entity_value) elif value_pb.list_value: result = [_get_value_from_value_pb(x) for x in value_pb.list_value] diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py index aff322d90926..5c158f32fd75 100644 --- a/gcloud/datastore/connection.py +++ b/gcloud/datastore/connection.py @@ -210,9 +210,10 @@ def run_query(self, dataset_id, query_pb, namespace=None): 
uses this method to fetch data: >>> from gcloud import datastore + >>> from gcloud.datastore.query import Query >>> connection = datastore.get_connection(email, key_path) >>> dataset = connection.dataset('dataset-id') - >>> query = dataset.query().kind('MyKind').filter('property =', 'val') + >>> query = Query(dataset, 'MyKind').filter('property =', 'val') Using the `fetch`` method... diff --git a/gcloud/datastore/dataset.py b/gcloud/datastore/dataset.py index 8ec22035c929..f3ee10185fd0 100644 --- a/gcloud/datastore/dataset.py +++ b/gcloud/datastore/dataset.py @@ -1,5 +1,7 @@ """Create / interact with gcloud datastore datasets.""" +from gcloud.datastore import _helpers + class Dataset(object): """A dataset in the Cloud Datastore. @@ -60,34 +62,6 @@ def id(self): return self._id - def query(self, *args, **kwargs): - """Create a query bound to this dataset. - - :param args: positional arguments, passed through to the Query - - :param kw: keyword arguments, passed through to the Query - - :rtype: :class:`gcloud.datastore.query.Query` - :returns: a new Query instance, bound to this dataset. - """ - # This import is here to avoid circular references. - from gcloud.datastore.query import Query - kwargs['dataset'] = self - return Query(*args, **kwargs) - - def entity(self, kind): - """Create an entity bound to this dataset. - - :type kind: string - :param kind: the "kind" of the new entity. - - :rtype: :class:`gcloud.datastore.entity.Entity` - :returns: a new Entity instance, bound to this dataset. - """ - # This import is here to avoid circular references. - from gcloud.datastore.entity import Entity - return Entity(dataset=self, kind=kind) - def transaction(self, *args, **kwargs): """Create a transaction bound to this dataset. @@ -98,10 +72,8 @@ def transaction(self, *args, **kwargs): :rtype: :class:`gcloud.datastore.transaction.Transaction` :returns: a new Transaction instance, bound to this dataset. """ - # This import is here to avoid circular references. 
- from gcloud.datastore.transaction import Transaction kwargs['dataset'] = self - return Transaction(*args, **kwargs) + return _helpers._FACTORIES.invoke('Transaction', *args, **kwargs) def get_entity(self, key): """Retrieves entity from the dataset, along with its attributes. @@ -125,9 +97,6 @@ def get_entities(self, keys): :rtype: list of :class:`gcloud.datastore.entity.Entity` :return: The requested entities. """ - # This import is here to avoid circular references. - from gcloud.datastore.entity import Entity - entity_pbs = self.connection().lookup( dataset_id=self.id(), key_pbs=[k.to_protobuf() for k in keys] @@ -135,5 +104,9 @@ def get_entities(self, keys): entities = [] for entity_pb in entity_pbs: - entities.append(Entity.from_protobuf(entity_pb, dataset=self)) + entities.append(_helpers._FACTORIES.invoke( + 'Entity_from_protobuf', entity_pb, dataset=self)) return entities + + +_helpers._FACTORIES.register('Dataset', Dataset) diff --git a/gcloud/datastore/demo/demo.py b/gcloud/datastore/demo/demo.py index 4d511fa6cb44..67eaf11ba2a4 100644 --- a/gcloud/datastore/demo/demo.py +++ b/gcloud/datastore/demo/demo.py @@ -6,10 +6,13 @@ # Let's start by importing the demo module and getting a dataset: from gcloud.datastore import demo +from gcloud.datastore.entity import Entity +from gcloud.datastore.query import Query + dataset = demo.get_dataset() # Let's create a new entity of type "Thing" and name it 'Toy': -toy = dataset.entity('Thing') +toy = Entity(dataset, 'Thing') toy.update({'name': 'Toy'}) # Now let's save it to our datastore: @@ -26,7 +29,7 @@ # Now let's try a more advanced query. # We'll start by look at all Thing entities: -query = dataset.query().kind('Thing') +query = Query(dataset, 'Thing') # Let's look at the first two. print query.limit(2).fetch() @@ -42,13 +45,13 @@ # (Check the official docs for explanations of what's happening here.) with dataset.transaction(): print 'Creating and savng an entity...' 
- thing = dataset.entity('Thing') + thing = Entity(dataset, 'Thing') thing.key(thing.key().name('foo')) thing['age'] = 10 thing.save() print 'Creating and saving another entity...' - thing2 = dataset.entity('Thing') + thing2 = Entity(dataset, 'Thing') thing2.key(thing2.key().name('bar')) thing2['age'] = 15 thing2.save() @@ -60,7 +63,7 @@ # To rollback a transaction, just call .rollback() with dataset.transaction() as t: - thing = dataset.entity('Thing') + thing = Entity(dataset, 'Thing') thing.key(thing.key().name('another')) thing.save() t.rollback() @@ -72,7 +75,7 @@ # Remember, a key won't be complete until the transaction is commited. # That is, while inside the transaction block, thing.key() will be incomplete. with dataset.transaction(): - thing = dataset.entity('Thing') + thing = Entity(dataset, 'Thing') thing.save() print thing.key() # This will be partial diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py index 88ecf1324ff2..a457304d0ac8 100644 --- a/gcloud/datastore/entity.py +++ b/gcloud/datastore/entity.py @@ -16,7 +16,7 @@ """ from gcloud.datastore import datastore_v1_pb2 as datastore_pb -from gcloud.datastore.key import Key +from gcloud.datastore import _helpers class NoKey(RuntimeError): @@ -39,15 +39,10 @@ class Entity(dict): This means you could take an existing entity and change the key to duplicate the object. - This can be used on its own, however it is likely easier to use - the shortcut methods provided by :class:`gcloud.datastore.dataset.Dataset` + Entities can be constructed directly, or obtained via + the "lookup" methods provided by :class:`gcloud.datastore.dataset.Dataset` such as: - - :func:`gcloud.datastore.dataset.Dataset.entity` to create a new entity. - - >>> dataset.entity('MyEntityKind') - - - :func:`gcloud.datastore.dataset.Dataset.get_entity` to retrieve an existing entity. 
@@ -74,7 +69,7 @@ def __init__(self, dataset=None, kind=None): super(Entity, self).__init__() self._dataset = dataset if kind: - self._key = Key().kind(kind) + self._key = _helpers._FACTORIES.invoke('Key').kind(kind) else: self._key = None @@ -159,11 +154,7 @@ def from_protobuf(cls, pb, dataset=None): :returns: The :class:`Entity` derived from the :class:`gcloud.datastore.datastore_v1_pb2.Entity`. """ - - # This is here to avoid circular imports. - from gcloud.datastore import _helpers - - key = Key.from_protobuf(pb.key) + key = _helpers._FACTORIES.invoke('Key_from_protobuf', pb.key) entity = cls.from_key(key, dataset) for property_pb in pb.property: @@ -248,7 +239,8 @@ def save(self): transaction.add_auto_id_entity(self) if isinstance(key_pb, datastore_pb.Key): - updated_key = Key.from_protobuf(key_pb) + updated_key = _helpers._FACTORIES.invoke( + 'Key_from_protobuf', key_pb) # Update the path (which may have been altered). self._key = key.path(updated_key.path()) @@ -275,3 +267,7 @@ def __repr__(self): super(Entity, self).__repr__()) else: return '' % (super(Entity, self).__repr__()) + + +_helpers._FACTORIES.register('Entity', Entity) +_helpers._FACTORIES.register('Entity_from_protobuf', Entity.from_protobuf) diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py index be28c4db42e7..f18d4ef96dd6 100644 --- a/gcloud/datastore/key.py +++ b/gcloud/datastore/key.py @@ -4,6 +4,7 @@ from itertools import izip from gcloud.datastore import datastore_v1_pb2 as datastore_pb +from gcloud.datastore import _helpers class Key(object): @@ -251,3 +252,8 @@ def parent(self): def __repr__(self): return '' % self.path() + + +_helpers._FACTORIES.register('Key', Key) +_helpers._FACTORIES.register('Key_from_protobuf', Key.from_protobuf) +_helpers._FACTORIES.register('Key_from_path', Key.from_path) diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py index b556e380b3bd..e550f1aa36e6 100644 --- a/gcloud/datastore/query.py +++ b/gcloud/datastore/query.py @@ -4,8 
+4,6 @@ from gcloud.datastore import datastore_v1_pb2 as datastore_pb from gcloud.datastore import _helpers -from gcloud.datastore.entity import Entity -from gcloud.datastore.key import Key class Query(object): @@ -18,24 +16,13 @@ class Query(object): and a clone is returned whenever any part of the query is modified:: - >>> query = Query('MyKind') + >>> query = Query(dataset, 'MyKind') >>> limited_query = query.limit(10) >>> query.limit() == 10 False >>> limited_query.limit() == 10 True - You typically won't construct a :class:`Query` - by initializing it like ``Query('MyKind', dataset=...)`` - but instead use the helper - :func:`gcloud.datastore.dataset.Dataset.query` method - which generates a query that can be executed - without any additional work:: - - >>> from gcloud import datastore - >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> query = dataset.query('MyKind') - :type kind: string :param kind: The kind to query. @@ -55,7 +42,7 @@ class Query(object): } """Mapping of operator strings and their protobuf equivalents.""" - def __init__(self, kind=None, dataset=None, namespace=None): + def __init__(self, dataset=None, kind=None, namespace=None): self._dataset = dataset self._namespace = namespace self._pb = datastore_pb.Query() @@ -163,13 +150,12 @@ def ancestor(self, ancestor): For example:: >>> parent_key = Key.from_path('Person', '1') - >>> query = dataset.query('Person') + >>> query = Query(dataset, 'Person') >>> filtered_query = query.ancestor(parent_key) If you don't have a :class:`gcloud.datastore.key.Key` but just know the path, you can provide that as well:: - >>> query = dataset.query('Person') >>> filtered_query = query.ancestor(['Person', '1']) Each call to ``.ancestor()`` returns a cloned :class:`Query`, @@ -203,10 +189,10 @@ def ancestor(self, ancestor): # If a list was provided, turn it into a Key. 
if isinstance(ancestor, list): - ancestor = Key.from_path(*ancestor) + ancestor = _helpers._FACTORIES.invoke('Key_from_path', *ancestor) # If we don't have a Key value by now, something is wrong. - if not isinstance(ancestor, Key): + if not isinstance(ancestor, _helpers._FACTORIES.get('Key')): raise TypeError('Expected list or Key, got %s.' % type(ancestor)) # Get the composite filter and add a new property filter. @@ -306,7 +292,7 @@ def fetch(self, limit=None): >>> from gcloud import datastore >>> dataset = datastore.get_dataset('dataset-id', email, key_path) - >>> query = dataset.query('Person').filter('name =', 'Sally') + >>> query = Query(dataset, 'Person').filter('name =', 'Sally') >>> query.fetch() [, , ...] >>> query.fetch(1) @@ -336,7 +322,8 @@ def fetch(self, limit=None): entity_pbs, end_cursor = query_results[:2] self._cursor = end_cursor - return [Entity.from_protobuf(entity, dataset=self.dataset()) + return [_helpers._FACTORIES.invoke('Entity_from_protobuf', entity, + dataset=self.dataset()) for entity in entity_pbs] def cursor(self): @@ -404,3 +391,6 @@ def order(self, *properties): property_order.direction = property_order.ASCENDING return clone + + +_helpers._FACTORIES.register('Query', Query) diff --git a/gcloud/datastore/test__helpers.py b/gcloud/datastore/test__helpers.py index 84ea41ea1ec0..cde28b64e171 100644 --- a/gcloud/datastore/test__helpers.py +++ b/gcloud/datastore/test__helpers.py @@ -1,6 +1,93 @@ import unittest2 +class Test_FactoryRegistry(unittest2.TestCase): + + def setUp(self): + self._widget = object() + self._called_with = [] + + def _makeOne(self): + from gcloud.datastore._helpers import _FACTORIES + return type(_FACTORIES)() + + def _factory(self, *args, **kw): # pragma: NO COVER + self._called_with.append((args, kw)) + return self._widget + + def test_register(self): + + class _Foo(object): + pass + + factories = self._makeOne() + factories.register('Foo', _Foo) + self.assertTrue(factories.get('Foo') is _Foo) + + def 
test_register_duplicate_exact(self): + + class _Foo(object): + pass + + factories = self._makeOne() + factories.register('Foo', _Foo) + factories.register('Foo', _Foo) + self.assertTrue(factories.get('Foo') is _Foo) + + def test_register_duplicate_conflict(self): + from gcloud.datastore._helpers import DuplicateFactory + + class _Foo(object): + pass + + class _Bar(object): + pass + + factories = self._makeOne() + factories.register('Foo', _Foo) + self.assertRaises(DuplicateFactory, factories.register, 'Foo', _Bar) + + def test_get_miss(self): + from gcloud.datastore._helpers import InvalidFactory + + factories = self._makeOne() + self.assertRaises(InvalidFactory, factories.get, 'Nonesuch') + + def test_get_hit(self): + factories = self._makeOne() + + # Use a bare function to avoid method wrappers for testing. + def _bare_factory(): # pragma: NO COVER + pass + + factories.register('Widget', _bare_factory) + self.assertTrue(factories.get('Widget') is _bare_factory) + + def test_invoke_miss(self): + from gcloud.datastore._helpers import InvalidFactory + + factories = self._makeOne() + self.assertRaises(InvalidFactory, factories.invoke, 'Nonesuch') + + def test_invoke_wo_args_or_kw(self): + factories = self._makeOne() + factories.register('Widget', self._factory) + self.assertTrue(factories.invoke('Widget') is self._widget) + self.assertEqual(self._called_with, [((), {})]) + + def test_invoke_w_args(self): + factories = self._makeOne() + factories.register('Widget', self._factory) + self.assertTrue(factories.invoke('Widget', 'foo', 42) is self._widget) + self.assertEqual(self._called_with, [(('foo', 42), {})]) + + def test_invoke_w_kw(self): + factories = self._makeOne() + factories.register('Widget', self._factory) + self.assertTrue(factories.invoke('Widget', foo=42) is self._widget) + self.assertEqual(self._called_with, [((), {'foo': 42})]) + + class Test__get_protobuf_attribute_and_value(unittest2.TestCase): def _callFUT(self, val): diff --git 
a/gcloud/datastore/test_connection.py b/gcloud/datastore/test_connection.py index e63aae1e6449..a286255ee291 100644 --- a/gcloud/datastore/test_connection.py +++ b/gcloud/datastore/test_connection.py @@ -313,7 +313,7 @@ def test_run_query_wo_namespace_empty_result(self): DATASET_ID = 'DATASET' KIND = 'Nonesuch' - q_pb = Query(KIND, DATASET_ID).to_protobuf() + q_pb = Query(kind=KIND).to_protobuf() rsp_pb = datastore_pb.RunQueryResponse() conn = self._makeOne() URI = '/'.join([ @@ -349,7 +349,7 @@ def test_run_query_w_namespace_nonempty_result(self): DATASET_ID = 'DATASET' KIND = 'Kind' entity = datastore_pb.Entity() - q_pb = Query(KIND, DATASET_ID).to_protobuf() + q_pb = Query(kind=KIND).to_protobuf() rsp_pb = datastore_pb.RunQueryResponse() rsp_pb.batch.entity_result.add(entity=entity) rsp_pb.batch.entity_result_type = 1 # FULL diff --git a/gcloud/datastore/test_dataset.py b/gcloud/datastore/test_dataset.py index 588499c2f41a..56cb142837be 100644 --- a/gcloud/datastore/test_dataset.py +++ b/gcloud/datastore/test_dataset.py @@ -26,23 +26,6 @@ def test_ctor_explicit(self): self.assertEqual(dataset.id(), DATASET_ID) self.assertTrue(dataset.connection() is CONNECTION) - def test_query_factory(self): - from gcloud.datastore.query import Query - DATASET_ID = 'DATASET' - dataset = self._makeOne(DATASET_ID) - query = dataset.query() - self.assertIsInstance(query, Query) - self.assertTrue(query.dataset() is dataset) - - def test_entity_factory(self): - from gcloud.datastore.entity import Entity - DATASET_ID = 'DATASET' - KIND = 'KIND' - dataset = self._makeOne(DATASET_ID) - entity = dataset.entity(KIND) - self.assertIsInstance(entity, Entity) - self.assertEqual(entity.kind(), KIND) - def test_transaction_factory(self): from gcloud.datastore.transaction import Transaction DATASET_ID = 'DATASET' diff --git a/gcloud/datastore/test_query.py b/gcloud/datastore/test_query.py index eeac85676e96..15cef3585b53 100644 --- a/gcloud/datastore/test_query.py +++ 
b/gcloud/datastore/test_query.py @@ -8,8 +8,8 @@ def _getTargetClass(self): return Query - def _makeOne(self, kind=None, dataset=None, namespace=None): - return self._getTargetClass()(kind, dataset, namespace) + def _makeOne(self, dataset=None, kind=None, namespace=None): + return self._getTargetClass()(dataset, kind, namespace) def test_ctor_defaults(self): query = self._getTargetClass()() @@ -25,7 +25,7 @@ def test_ctor_explicit(self): _KIND = 'KIND' _NAMESPACE = 'NAMESPACE' dataset = Dataset(_DATASET) - query = self._makeOne(_KIND, dataset, _NAMESPACE) + query = self._makeOne(dataset, _KIND, _NAMESPACE) self.assertTrue(query.dataset() is dataset) kq_pb, = list(query.kind()) self.assertEqual(kq_pb.name, _KIND) @@ -39,7 +39,7 @@ def test__clone(self): _CURSOR = 'DEADBEEF' _NAMESPACE = 'NAMESPACE' dataset = Dataset(_DATASET) - query = self._makeOne(_KIND, dataset, _NAMESPACE) + query = self._makeOne(dataset, _KIND, _NAMESPACE) query._cursor = _CURSOR clone = query._clone() self.assertFalse(clone is query) @@ -58,7 +58,7 @@ def test_to_protobuf_empty(self): def test_to_protobuf_w_kind(self): _KIND = 'KIND' - query = self._makeOne(_KIND) + query = self._makeOne(kind=_KIND) q_pb = query.to_protobuf() kq_pb, = list(q_pb.kind) self.assertEqual(kq_pb.name, _KIND) @@ -189,7 +189,7 @@ def test_kind_setter_wo_existing(self): _DATASET = 'DATASET' _KIND = 'KIND' dataset = Dataset(_DATASET) - query = self._makeOne(dataset=dataset) + query = self._makeOne(dataset) after = query.kind(_KIND) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -203,7 +203,7 @@ def test_kind_setter_w_existing(self): _KIND_BEFORE = 'KIND_BEFORE' _KIND_AFTER = 'KIND_AFTER' dataset = Dataset(_DATASET) - query = self._makeOne(_KIND_BEFORE, dataset) + query = self._makeOne(dataset, _KIND_BEFORE) after = query.kind(_KIND_AFTER) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -218,7 +218,7 @@ def 
test_limit_setter_wo_existing(self): _KIND = 'KIND' _LIMIT = 42 dataset = Dataset(_DATASET) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) after = query.limit(_LIMIT) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -232,7 +232,7 @@ def test_dataset_setter(self): _DATASET = 'DATASET' _KIND = 'KIND' dataset = Dataset(_DATASET) - query = self._makeOne(_KIND) + query = self._makeOne(kind=_KIND) after = query.dataset(dataset) self.assertFalse(after is query) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -254,7 +254,7 @@ def test_fetch_default_limit(self): prop.value.string_value = u'Foo' connection = _Connection(entity_pb) dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) entities = query.fetch() self.assertEqual(len(entities), 1) self.assertEqual(entities[0].key().path(), @@ -283,7 +283,7 @@ def test_fetch_explicit_limit(self): connection = _Connection(entity_pb) connection._cursor = _CURSOR dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset, _NAMESPACE) + query = self._makeOne(dataset, _KIND, _NAMESPACE) limited = query.limit(13) entities = query.fetch(13) self.assertEqual(query._cursor, _CURSOR) @@ -302,7 +302,7 @@ def test_cursor_not_fetched(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) self.assertRaises(RuntimeError, query.cursor) def test_cursor_fetched(self): @@ -312,7 +312,7 @@ def test_cursor_fetched(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) query._cursor = _CURSOR self.assertEqual(query.cursor(), base64.b64encode(_CURSOR)) @@ -321,7 +321,7 @@ def test_with_cursor_neither(self): _KIND = 'KIND' connection = _Connection() 
dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) self.assertTrue(query.with_cursor(None) is query) def test_with_cursor_w_start(self): @@ -332,7 +332,7 @@ def test_with_cursor_w_start(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) after = query.with_cursor(_CURSOR_B64) self.assertFalse(after is query) q_pb = after.to_protobuf() @@ -347,7 +347,7 @@ def test_with_cursor_w_end(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) after = query.with_cursor(None, _CURSOR_B64) self.assertFalse(after is query) q_pb = after.to_protobuf() @@ -364,7 +364,7 @@ def test_with_cursor_w_both(self): _KIND = 'KIND' connection = _Connection() dataset = _Dataset(_DATASET, connection) - query = self._makeOne(_KIND, dataset) + query = self._makeOne(dataset, _KIND) after = query.with_cursor(_START_B64, _END_B64) self.assertFalse(after is query) q_pb = after.to_protobuf() @@ -373,7 +373,7 @@ def test_with_cursor_w_both(self): def test_order_empty(self): _KIND = 'KIND' - before = self._makeOne(_KIND) + before = self._makeOne(kind=_KIND) after = before.order() self.assertFalse(after is before) self.assertTrue(isinstance(after, self._getTargetClass())) @@ -381,7 +381,7 @@ def test_order_empty(self): def test_order_single_asc(self): _KIND = 'KIND' - before = self._makeOne(_KIND) + before = self._makeOne(kind=_KIND) after = before.order('field') after_pb = after.to_protobuf() order_pb = list(after_pb.order) @@ -392,7 +392,7 @@ def test_order_single_asc(self): def test_order_single_desc(self): _KIND = 'KIND' - before = self._makeOne(_KIND) + before = self._makeOne(kind=_KIND) after = before.order('-field') after_pb = after.to_protobuf() order_pb = list(after_pb.order) @@ -403,7 +403,7 @@ 
def test_order_single_desc(self): test_order_multiple(self): _KIND = 'KIND' - before = self._makeOne(_KIND) + before = self._makeOne(kind=_KIND) after = before.order('foo', '-bar') after_pb = after.to_protobuf() order_pb = list(after_pb.order) diff --git a/gcloud/datastore/transaction.py b/gcloud/datastore/transaction.py index d20f35a686d4..f5425ea49c4a 100644 --- a/gcloud/datastore/transaction.py +++ b/gcloud/datastore/transaction.py @@ -1,7 +1,7 @@ """Create / interact with gcloud datastore transactions.""" from gcloud.datastore import datastore_v1_pb2 as datastore_pb -from gcloud.datastore.key import Key +from gcloud.datastore import _helpers class Transaction(object): @@ -42,8 +42,9 @@ class Transaction(object): That means, if you try:: + >>> from gcloud.datastore.entity import Entity >>> with dataset.transaction(): - ... entity = dataset.entity('Thing').save() + ... entity = Entity(dataset, 'Thing').save() ``entity`` won't have a complete Key until the transaction is committed. @@ -53,7 +54,7 @@ class Transaction(object): to the entity:: >>> with dataset.transaction(): - ... entity = dataset.entity('Thing') + ... entity = Entity(dataset, 'Thing') ... entity.save() ... assert entity.key().is_partial() # There is no ID on this key. >>> assert not entity.key().is_partial() # There *is* an ID. @@ -75,7 +76,7 @@ class Transaction(object): >>> transaction = dataset.transaction() >>> transaction.begin() - >>> entity = dataset.entity('Thing') + >>> entity = Entity(dataset, 'Thing') >>> entity.save() >>> if error: @@ -94,19 +95,17 @@ class Transaction(object): For example, this is perfectly valid:: - >>> from gcloud import datastore >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> with dataset.transaction(): - ... dataset.entity('Thing').save() + ... 
Entity(dataset, 'Thing').save() However, this **wouldn't** be acceptable:: - >>> from gcloud import datastore >>> dataset = datastore.get_dataset('dataset-id', email, key_path) >>> with dataset.transaction(): - ... dataset.entity('Thing').save() - ... with dataset.transaction(): - ... dataset.entity('Thing').save() + ... Entity(dataset, 'Thing').save() + ... with dataset.transaction(): + ... Entity(dataset, 'Thing').save() Technically, it looks like the Protobuf API supports this type of pattern, however it makes the code particularly messy. @@ -116,9 +115,9 @@ class Transaction(object): >>> dataset1 = datastore.get_dataset('dataset-id', email, key_path) >>> dataset2 = datastore.get_dataset('dataset-id', email, key_path) >>> with dataset1.transaction(): - ... dataset1.entity('Thing').save() - ... with dataset2.transaction(): - ... dataset2.entity('Thing').save() + ... Entity(dataset1, 'Thing').save() + ... with dataset2.transaction(): + ... Entity(dataset2, 'Thing').save() :type dataset: :class:`gcloud.datastore.dataset.Dataset` :param dataset: The dataset to which this :class:`Transaction` belongs. @@ -236,7 +235,9 @@ def commit(self): # For any of the auto-id entities, make sure we update their keys. for i, entity in enumerate(self._auto_id_entities): key_pb = result.insert_auto_id_key[i] - key = Key.from_protobuf(key_pb) + # We can ignore the 'dataset' parameter because we are only + # creating a transient instance in order to copy its path. + key = _helpers._FACTORIES.invoke('Key_from_protobuf', key_pb) entity.key(entity.key().path(key.path())) # Tell the connection that the transaction is over. 
@@ -254,3 +255,6 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.commit() else: self.rollback() + + +_helpers._FACTORIES.register('Transaction', Transaction) diff --git a/regression/datastore.py b/regression/datastore.py index 047b6f586dfe..4a4f42ac5c3b 100644 --- a/regression/datastore.py +++ b/regression/datastore.py @@ -30,6 +30,7 @@ def _get_dataset(self): return self._datasets[self._dataset_id] def _get_post(self, name=None, key_id=None, post_content=None): + from gcloud.datastore.entity import Entity post_content = post_content or { 'title': 'How to make the perfect pizza in your grill', 'tags': ['pizza', 'grill'], @@ -44,7 +45,7 @@ def _get_post(self, name=None, key_id=None, post_content=None): } # Create an entity with the given content in our dataset. dataset = self._get_dataset() - entity = dataset.entity(kind='Post') + entity = Entity(dataset, 'Post') entity.update(post_content) # Update the entity key. @@ -114,5 +115,7 @@ def test_save_multiple(self): self.assertEqual(len(matches), 2) def test_empty_kind(self): - posts = self._get_dataset().query().kind('Post').limit(2).fetch() + from gcloud.datastore.query import Query + dataset = self._get_dataset() + posts = Query(dataset=dataset).kind('Post').limit(2).fetch() self.assertEqual(posts, [])