Putting helpers in datastore Batch for getting new mutations. #1319

Merged
merged 1 commit into from Dec 22, 2015
34 changes: 30 additions & 4 deletions gcloud/datastore/batch.py
@@ -98,6 +98,33 @@ def connection(self):
"""
return self._client.connection

def _add_partial_key_entity_pb(self):
"""Adds a new mutation for an entity with a partial key.

:rtype: :class:`gcloud.datastore._entity_pb2.Entity`
:returns: The newly created entity protobuf that will be
updated and sent with a commit.
"""
return self.mutations.insert_auto_id.add()

def _add_complete_key_entity_pb(self):
"""Adds a new mutation for an entity with a completed key.

:rtype: :class:`gcloud.datastore._entity_pb2.Entity`
:returns: The newly created entity protobuf that will be
updated and sent with a commit.
"""
return self.mutations.upsert.add()

def _add_delete_key_pb(self):
"""Adds a new mutation for a key to be deleted.

:rtype: :class:`gcloud.datastore._entity_pb2.Key`
:returns: The newly created key protobuf that will be
deleted when sent with a commit.
"""
return self.mutations.delete.add()

@property
def mutations(self):
"""Getter for the changes accumulated by this batch.
@@ -146,10 +173,10 @@ def put(self, entity):
raise ValueError("Key must be from same dataset as batch")

if entity.key.is_partial:
entity_pb = self.mutations.insert_auto_id.add()
entity_pb = self._add_partial_key_entity_pb()
self._partial_key_entities.append(entity)
else:
entity_pb = self.mutations.upsert.add()
entity_pb = self._add_complete_key_entity_pb()

_assign_entity_to_pb(entity_pb, entity)

@@ -169,14 +196,13 @@ def delete(self, key):
raise ValueError("Key must be from same dataset as batch")

key_pb = helpers._prepare_key_for_request(key.to_protobuf())
self.mutations.delete.add().CopyFrom(key_pb)
self._add_delete_key_pb().CopyFrom(key_pb)

def begin(self):
"""No-op

Overridden by :class:`gcloud.datastore.transaction.Transaction`.
"""
pass

def commit(self):
"""Commits the batch.
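The batch.py change factors the three mutation kinds into small helpers that each append a fresh protobuf to the appropriate repeated field and hand it back for the caller to populate before commit. Below is a minimal, self-contained sketch of that add-and-return pattern; the _Stub* classes and _SketchBatch are hypothetical stand-ins written purely for illustration and are not part of gcloud.

# Minimal sketch of the add-and-return pattern used by the new helpers.
# Everything here is a hypothetical stand-in; only the control flow
# mirrors what Batch.put() now does.

class _StubEntityPb(object):
    """Placeholder for an entity protobuf message."""
    def __init__(self):
        self.key = None
        self.properties = {}


class _StubRepeatedField(list):
    """Mimics a protobuf repeated field: add() appends and returns a new message."""
    def __init__(self, message_class):
        super(_StubRepeatedField, self).__init__()
        self._message_class = message_class

    def add(self):
        message = self._message_class()
        self.append(message)
        return message


class _StubMutation(object):
    """Placeholder for the mutation protobuf with its repeated fields."""
    def __init__(self):
        self.insert_auto_id = _StubRepeatedField(_StubEntityPb)
        self.upsert = _StubRepeatedField(_StubEntityPb)
        self.delete = _StubRepeatedField(_StubEntityPb)


class _SketchBatch(object):
    """Routes puts through the same kind of helpers the real Batch now defines."""
    def __init__(self):
        self.mutations = _StubMutation()

    def _add_partial_key_entity_pb(self):
        return self.mutations.insert_auto_id.add()

    def _add_complete_key_entity_pb(self):
        return self.mutations.upsert.add()

    def put(self, key_is_partial):
        # Pick the mutation kind based on key completeness, then let the
        # caller fill in the returned protobuf before commit.
        if key_is_partial:
            entity_pb = self._add_partial_key_entity_pb()
        else:
            entity_pb = self._add_complete_key_entity_pb()
        return entity_pb


batch = _SketchBatch()
batch.put(key_is_partial=True)
assert len(batch.mutations.insert_auto_id) == 1
assert len(batch.mutations.upsert) == 0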
114 changes: 42 additions & 72 deletions gcloud/datastore/test_batch.py
@@ -89,13 +89,8 @@ def test_put_entity_w_partial_key(self):

batch.put(entity)

insert_auto_ids = list(batch.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 1)
self.assertEqual(insert_auto_ids[0].key, key._key)
upserts = list(batch.mutations.upsert)
self.assertEqual(len(upserts), 0)
deletes = list(batch.mutations.delete)
self.assertEqual(len(deletes), 0)
mutated_entity = _mutated_pb(self, batch.mutations, 'insert_auto_id')
self.assertEqual(mutated_entity.key, key._key)
self.assertEqual(batch._partial_key_entities, [entity])

def test_put_entity_w_completed_key(self):
@@ -115,14 +110,10 @@ def test_put_entity_w_completed_key(self):

batch.put(entity)

insert_auto_ids = list(batch.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
upserts = list(batch.mutations.upsert)
self.assertEqual(len(upserts), 1)

upsert = upserts[0]
self.assertEqual(upsert.key, key._key)
props = dict([(prop.name, prop.value) for prop in upsert.property])
mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
self.assertEqual(mutated_entity.key, key._key)
props = dict([(prop.name, prop.value)
for prop in mutated_entity.property])
self.assertTrue(props['foo'].indexed)
self.assertFalse(props['baz'].indexed)
self.assertTrue(props['spam'].indexed)
@@ -131,9 +122,6 @@ def test_put_entity_w_completed_key(self):
self.assertFalse(props['spam'].list_value[2].indexed)
self.assertFalse('frotz' in props)

deletes = list(batch.mutations.delete)
self.assertEqual(len(deletes), 0)

def test_put_entity_w_completed_key_prefixed_dataset_id(self):
_DATASET = 'DATASET'
_PROPERTIES = {
@@ -151,14 +139,10 @@ def test_put_entity_w_completed_key_prefixed_dataset_id(self):

batch.put(entity)

insert_auto_ids = list(batch.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
upserts = list(batch.mutations.upsert)
self.assertEqual(len(upserts), 1)

upsert = upserts[0]
self.assertEqual(upsert.key, key._key)
props = dict([(prop.name, prop.value) for prop in upsert.property])
mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
self.assertEqual(mutated_entity.key, key._key)
props = dict([(prop.name, prop.value)
for prop in mutated_entity.property])
self.assertTrue(props['foo'].indexed)
self.assertFalse(props['baz'].indexed)
self.assertTrue(props['spam'].indexed)
@@ -167,9 +151,6 @@ def test_put_entity_w_completed_key_prefixed_dataset_id(self):
self.assertFalse(props['spam'].list_value[2].indexed)
self.assertFalse('frotz' in props)

deletes = list(batch.mutations.delete)
self.assertEqual(len(deletes), 0)

def test_delete_w_partial_key(self):
_DATASET = 'DATASET'
connection = _Connection()
@@ -198,13 +179,8 @@ def test_delete_w_completed_key(self):

batch.delete(key)

insert_auto_ids = list(batch.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
upserts = list(batch.mutations.upsert)
self.assertEqual(len(upserts), 0)
deletes = list(batch.mutations.delete)
self.assertEqual(len(deletes), 1)
self.assertEqual(deletes[0], key._key)
mutated_key = _mutated_pb(self, batch.mutations, 'delete')
self.assertEqual(mutated_key, key._key)

def test_delete_w_completed_key_w_prefixed_dataset_id(self):
_DATASET = 'DATASET'
@@ -215,13 +191,8 @@ def test_delete_w_completed_key_w_prefixed_dataset_id(self):

batch.delete(key)

insert_auto_ids = list(batch.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
upserts = list(batch.mutations.upsert)
self.assertEqual(len(upserts), 0)
deletes = list(batch.mutations.delete)
self.assertEqual(len(deletes), 1)
self.assertEqual(deletes[0], key._key)
mutated_key = _mutated_pb(self, batch.mutations, 'delete')
self.assertEqual(mutated_key, key._key)

def test_commit(self):
_DATASET = 'DATASET'
@@ -268,13 +239,8 @@ def test_as_context_mgr_wo_error(self):

self.assertEqual(list(client._batches), [])

insert_auto_ids = list(batch.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
upserts = list(batch.mutations.upsert)
self.assertEqual(len(upserts), 1)
self.assertEqual(upserts[0].key, key._key)
deletes = list(batch.mutations.delete)
self.assertEqual(len(deletes), 0)
mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
self.assertEqual(mutated_entity.key, key._key)
self.assertEqual(connection._committed,
[(_DATASET, batch.mutations, None)])

@@ -301,21 +267,11 @@ def test_as_context_mgr_nested(self):

self.assertEqual(list(client._batches), [])

insert_auto_ids = list(batch1.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
upserts = list(batch1.mutations.upsert)
self.assertEqual(len(upserts), 1)
self.assertEqual(upserts[0].key, key1._key)
deletes = list(batch1.mutations.delete)
self.assertEqual(len(deletes), 0)

insert_auto_ids = list(batch2.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
upserts = list(batch2.mutations.upsert)
self.assertEqual(len(upserts), 1)
self.assertEqual(upserts[0].key, key2._key)
deletes = list(batch2.mutations.delete)
self.assertEqual(len(deletes), 0)
mutated_entity1 = _mutated_pb(self, batch1.mutations, 'upsert')
self.assertEqual(mutated_entity1.key, key1._key)

mutated_entity2 = _mutated_pb(self, batch2.mutations, 'upsert')
self.assertEqual(mutated_entity2.key, key2._key)

self.assertEqual(connection._committed,
[(_DATASET, batch2.mutations, None),
@@ -341,13 +297,8 @@ def test_as_context_mgr_w_error(self):

self.assertEqual(list(client._batches), [])

insert_auto_ids = list(batch.mutations.insert_auto_id)
self.assertEqual(len(insert_auto_ids), 0)
upserts = list(batch.mutations.upsert)
self.assertEqual(len(upserts), 1)
self.assertEqual(upserts[0].key, key._key)
deletes = list(batch.mutations.delete)
self.assertEqual(len(deletes), 0)
mutated_entity = _mutated_pb(self, batch.mutations, 'upsert')
self.assertEqual(mutated_entity.key, key._key)
self.assertEqual(connection._committed, [])


@@ -436,3 +387,22 @@ def _pop_batch(self):
def current_batch(self):
if self._batches:
return self._batches[0]


def _assert_num_mutations(test_case, mutation_pb, num_mutations):
total_mutations = (len(mutation_pb.upsert) +
len(mutation_pb.update) +
len(mutation_pb.insert) +
len(mutation_pb.insert_auto_id) +
len(mutation_pb.delete))
test_case.assertEqual(total_mutations, num_mutations)


def _mutated_pb(test_case, mutation_pb, mutation_type):
# Make sure there is only one mutation.
_assert_num_mutations(test_case, mutation_pb, 1)

mutated_pbs = getattr(mutation_pb, mutation_type, [])
# Make sure we have exactly one protobuf.
test_case.assertEqual(len(mutated_pbs), 1)
return mutated_pbs[0]
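
As a quick illustration of the contract these test helpers enforce, the hypothetical test below (assuming it lives in this same module so _mutated_pb is in scope) builds a stub mutation protobuf carrying a single upsert; _mutated_pb first checks that it is the only mutation of any kind, then that exactly one protobuf of the named type exists, and returns it. _StubMutationPb and TestMutatedPbHelper are invented names for this sketch.

import unittest


class _StubMutationPb(object):
    """Stand-in for the mutation protobuf's five repeated fields."""

    def __init__(self, **kwargs):
        for field in ('insert_auto_id', 'upsert', 'update', 'insert', 'delete'):
            # Fields not passed in default to empty lists.
            setattr(self, field, kwargs.get(field, []))


class TestMutatedPbHelper(unittest.TestCase):

    def test_single_upsert(self):
        entity_pb = object()
        mutation_pb = _StubMutationPb(upsert=[entity_pb])
        # Exactly one mutation overall, exactly one upsert, and the helper
        # hands back that same protobuf.
        self.assertTrue(_mutated_pb(self, mutation_pb, 'upsert') is entity_pb)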
26 changes: 10 additions & 16 deletions gcloud/datastore/test_client.py
@@ -637,6 +637,7 @@ def test_put_multi_no_batch_w_partial_key(self):
def test_put_multi_existing_batch_w_completed_key(self):
from gcloud.datastore.test_batch import _Entity
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _mutated_pb

creds = object()
client = self._makeOne(credentials=creds)
@@ -647,14 +648,11 @@ def test_put_multi_existing_batch_w_completed_key(self):
result = client.put_multi([entity])

self.assertEqual(result, None)
self.assertEqual(len(CURR_BATCH.mutations.insert_auto_id), 0)
upserts = list(CURR_BATCH.mutations.upsert)
self.assertEqual(len(upserts), 1)
self.assertEqual(upserts[0].key, key.to_protobuf())
properties = list(upserts[0].property)
mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, 'upsert')
self.assertEqual(mutated_entity.key, key.to_protobuf())
properties = list(mutated_entity.property)
self.assertEqual(properties[0].name, 'foo')
self.assertEqual(properties[0].value.string_value, u'bar')
self.assertEqual(len(CURR_BATCH.mutations.delete), 0)

def test_delete(self):
_called_with = []
@@ -698,6 +696,7 @@ def test_delete_multi_no_batch(self):

def test_delete_multi_w_existing_batch(self):
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _mutated_pb

creds = object()
client = self._makeOne(credentials=creds)
@@ -707,15 +706,13 @@ def test_delete_multi_w_existing_batch(self):
result = client.delete_multi([key])

self.assertEqual(result, None)
self.assertEqual(len(CURR_BATCH.mutations.insert_auto_id), 0)
self.assertEqual(len(CURR_BATCH.mutations.upsert), 0)
deletes = list(CURR_BATCH.mutations.delete)
self.assertEqual(len(deletes), 1)
self.assertEqual(deletes[0], key._key)
mutated_key = _mutated_pb(self, CURR_BATCH.mutations, 'delete')
self.assertEqual(mutated_key, key._key)
self.assertEqual(len(client.connection._commit_cw), 0)

def test_delete_multi_w_existing_transaction(self):
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _mutated_pb

creds = object()
client = self._makeOne(credentials=creds)
@@ -725,11 +722,8 @@ def test_delete_multi_w_existing_transaction(self):
result = client.delete_multi([key])

self.assertEqual(result, None)
self.assertEqual(len(CURR_XACT.mutations.insert_auto_id), 0)
self.assertEqual(len(CURR_XACT.mutations.upsert), 0)
deletes = list(CURR_XACT.mutations.delete)
self.assertEqual(len(deletes), 1)
self.assertEqual(deletes[0], key._key)
mutated_key = _mutated_pb(self, CURR_XACT.mutations, 'delete')
self.assertEqual(mutated_key, key._key)
self.assertEqual(len(client.connection._commit_cw), 0)

def test_allocate_ids_w_partial_key(self):