Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix #141: remove spurious *args / **kwargs from API methods. #195

Closed
wants to merge 32 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
a7e8864
Fix #133: preserve PB namespace in Key.from_protobuf.
tseaver Sep 24, 2014
a077d7d
Add docstrings for API method of g.datastore.key.Key.
tseaver Sep 24, 2014
0e625da
Fix #134: enforce contract that *args must be pairs.
tseaver Sep 24, 2014
fe1450b
Propose a contract for Key.parent.
tseaver Sep 24, 2014
c8f6aa4
Fix #135: enforce contract for Key.parent().
tseaver Sep 24, 2014
f107fda
Incorporate feedback from @dhermes.
tseaver Sep 25, 2014
7076c4d
Merge branch 'add_key_docstrings' into 133-key.from_protobuf-preserve…
tseaver Sep 25, 2014
19c6382
Merge branch 'add_key_docstrings' into 134-key-from_path-contract
tseaver Sep 25, 2014
4500f5e
Merge branch 'add_key_docstrings' into 135-key-parent-contract
tseaver Sep 25, 2014
37dd328
Incorporate feedback from @dhermes.
tseaver Sep 25, 2014
d1280f3
Incorporate feedback from @dhermes.
tseaver Sep 25, 2014
8ce18c9
Merge branch 'add_key_docstrings' into 135-key-parent-contract
tseaver Sep 25, 2014
e6ad297
Fix #137: default key based on filename.
tseaver Sep 26, 2014
fb756b2
Fix #138: clear existing (local) ACL when saving ACL to server.
tseaver Sep 26, 2014
2f93065
Added bucket method to upload files from a file object.
Sep 21, 2014
e19cf86
Fix #139: clear existing (local) ACL when saving defaultObjectACL to…
tseaver Sep 26, 2014
ccd8a5e
Fix #140: make 'path' required for Connection.api_request.
tseaver Sep 26, 2014
e346282
Fix #141: remove spurious *args / **kwargs from API methods.
tseaver Sep 26, 2014
be6e9b7
Fix #143: off-by-one range in iterator.KeyIterator.get_headers.
tseaver Sep 26, 2014
0a5fe6b
Merge pull request #184 from tseaver/add_key_docstrings
silvolu Sep 29, 2014
5c3d8ae
Merge pull request #198 from dhermes/datastore-cnxn-inherit-generic
silvolu Sep 29, 2014
62e887d
Merge pull request #145 from kleyow/files
silvolu Sep 30, 2014
60887ab
Merge pull request #191 from tseaver/137-upload_file-guess_key_based_…
silvolu Sep 30, 2014
23fc80f
Merge pull request #192 from tseaver/138-save_acl-clear_existing_loca…
silvolu Sep 30, 2014
d8b4e36
Merge pull request #193 from tseaver/139-save_default_object_acl-clea…
silvolu Sep 30, 2014
3bd6bd8
Merge pull request #194 from tseaver/140-connection.api_request-handl…
silvolu Sep 30, 2014
bc53038
Merge pull request #196 from tseaver/143-storage.iterator.KeyDataIter…
silvolu Sep 30, 2014
77e5f0b
Merge pull request #183 from tseaver/133-key.from_protobuf-preserve-n…
silvolu Sep 30, 2014
1bb281c
Merge pull request #185 from tseaver/134-key-from_path-contract
silvolu Sep 30, 2014
9af7d1b
Merge pull request #186 from tseaver/135-key-parent-contract
silvolu Sep 30, 2014
b848468
Fix #141: remove spurious *args / **kwargs from API methods.
tseaver Sep 26, 2014
0ba7398
Merge branch '141-connection.get_bucket-spurious_args_kwargs' of gith…
tseaver Oct 1, 2014
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
135 changes: 132 additions & 3 deletions gcloud/datastore/key.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,18 @@ class Key(object):
"""

def __init__(self, dataset=None, namespace=None, path=None):
    """Initialize the key from its constituent parts.

    :type dataset: :class:`gcloud.datastore.dataset.Dataset`
    :param dataset: A dataset instance for the key.

    :type namespace: :class:`str`
    :param namespace: A namespace identifier for the key.

    :type path: sequence of dicts
    :param path: Each dict must have keys 'kind' (a string) and optionally
                 'name' (a string) or 'id' (an integer).
    """
    # An absent / empty path defaults to a single partial path element.
    if not path:
        path = [{'kind': ''}]
    self._dataset = dataset
    self._namespace = namespace
    self._path = path
Expand All @@ -21,13 +33,30 @@ def _clone(self):
We make a shallow copy of the :class:`gcloud.datastore.dataset.Dataset`
because it holds a reference an authenticated connection,
which we don't want to lose.

:rtype: :class:`gcloud.datastore.key.Key`
:returns: a new `Key` instance
"""
clone = copy.deepcopy(self)
clone._dataset = self._dataset # Make a shallow copy of the Dataset.
return clone

@classmethod
def from_protobuf(cls, pb, dataset=None):
"""Factory method for creating a key based on a protobuf.

The protobuf should be one returned from the Cloud Datastore Protobuf API.

:type pb: :class:`gcloud.datastore.datastore_v1_pb2.Key`
:param pb: The Protobuf representing the key.

:type dataset: :class:`gcloud.datastore.dataset.Dataset`
:param dataset: A dataset instance. If not passed, defaults to an
instance whose ID is derived from pb.

:rtype: :class:`gcloud.datastore.key.Key`
:returns: a new `Key` instance
"""
path = []
for element in pb.path_element:
element_dict = {'kind': element.kind}
Expand All @@ -42,10 +71,18 @@ def from_protobuf(cls, pb, dataset=None):

if not dataset:
dataset = Dataset(id=pb.partition_id.dataset_id)
namespace = pb.partition_id.namespace
else:
namespace = None

return cls(path=path, dataset=dataset)
return cls(dataset, namespace, path)

def to_protobuf(self):
"""Return a protobuf corresponding to the key.

:rtype: :class:`gcloud.datastore.datastore_v1_pb2.Key`
:returns: The Protobuf representing the key.
"""
key = datastore_pb.Key()

# Technically a dataset is required to do anything with the key,
Expand Down Expand Up @@ -77,6 +114,23 @@ def to_protobuf(self):

@classmethod
def from_path(cls, *args, **kwargs):
"""Factory method for creating a key based on a path.

:type args: :class:`tuple
:param args: sequence of even length, where the first of each
pair is a string representing the 'kind' of the path element, and the
second of the pair is either a string (for the path element's name)
or an integer (for its id).

:type kwargs: :class:`dict`
:param kwargs: Other named parameters which can be passed to `__init__()`.

:rtype: :class:`gcloud.datastore.key.Key`
:returns: a new `Key` instance
"""
if len(args) % 2:
raise ValueError('Must pass an even number of args.')

path = []
items = iter(args)

Expand All @@ -92,9 +146,25 @@ def from_path(cls, *args, **kwargs):
return cls(**kwargs)

def is_partial(self):
    """Boolean test: is the key only partially mapped to a backend entity?

    A partial key's last path element has neither an 'id' nor a 'name',
    so it does not yet identify a stored entity.

    :rtype: :class:`bool`
    :returns: True if the last element of the key's path does not have an
              'id' or a 'name'.
    """
    return self.id_or_name() is None

def dataset(self, dataset=None):
"""Setter / getter.

:type dataset: :class:`gcloud.datastore.dataset.Dataset`
:param dataset: A dataset instance for the key.

:rtype: :class:`Key` (for setter); or
:class:`gcloud.datastore.dataset.Dataset` (for getter)
:returns: a new key, cloned from self., with the given dataset (setter);
or self's dataset (getter).
"""
if dataset:
clone = self._clone()
clone._dataset = dataset
Expand All @@ -103,6 +173,15 @@ def dataset(self, dataset=None):
return self._dataset

def namespace(self, namespace=None):
"""Setter / getter.

:type namespace: :class:`str`
:param namespace: A namespace identifier for the key.

:rtype: :class:`Key` (for setter); or :class:`str` (for getter)
:returns: a new key, cloned from self., with the given namespace (setter);
or self's namespace (getter).
"""
if namespace:
clone = self._clone()
clone._namespace = namespace
Expand All @@ -111,6 +190,16 @@ def namespace(self, namespace=None):
return self._namespace

def path(self, path=None):
"""Setter / getter.

:type path: sequence of dicts
:param path: Each dict must have keys 'kind' (a string) and optionally
'name' (a string) or 'id' (an integer).

:rtype: :class:`Key` (for setter); or :class:`str` (for getter)
:returns: a new key, cloned from self., with the given path (setter);
or self's path (getter).
"""
if path:
clone = self._clone()
clone._path = path
Expand All @@ -119,6 +208,15 @@ def path(self, path=None):
return self._path

def kind(self, kind=None):
"""Setter / getter. Based on the last element of path.

:type kind: :class:`str`
:param kind: The new kind for the key.

:rtype: :class:`Key` (for setter); or :class:`str` (for getter)
:returns: a new key, cloned from self., with the given kind (setter);
or self's kind (getter).
"""
if kind:
clone = self._clone()
clone._path[-1]['kind'] = kind
Expand All @@ -127,6 +225,15 @@ def kind(self, kind=None):
return self._path[-1]['kind']

def id(self, id=None):
"""Setter / getter. Based on the last element of path.

:type kind: :class:`str`
:param kind: The new kind for the key.

:rtype: :class:`Key` (for setter); or :class:`int` (for getter)
:returns: a new key, cloned from self., with the given id (setter);
or self's id (getter).
"""
if id:
clone = self._clone()
clone._path[-1]['id'] = id
Expand All @@ -135,6 +242,15 @@ def id(self, id=None):
return self._path[-1].get('id')

def name(self, name=None):
"""Setter / getter. Based on the last element of path.

:type kind: :class:`str`
:param kind: The new name for the key.

:rtype: :class:`Key` (for setter); or :class:`str` (for getter)
:returns: a new key, cloned from self., with the given name (setter);
or self's name (getter).
"""
if name:
clone = self._clone()
clone._path[-1]['name'] = name
Expand All @@ -143,11 +259,24 @@ def name(self, name=None):
return self._path[-1].get('name')

def id_or_name(self):
    """Getter. Based on the last element of path.

    :rtype: :class:`int` (if 'id' is set); :class:`str` (if 'name' is
            set); or None (if neither is set)
    :returns: The 'id' of the last path element if set; otherwise its
              'name' if set; otherwise None.
    """
    return self.id() or self.name()

def parent(self):
    """Getter: return a new key for the next highest element in path.

    Resolves diff residue in this span: the old stub body
    (``raise NotImplementedError``, issue #135) is superseded by the
    implementation below.

    :rtype: :class:`gcloud.datastore.key.Key` or None
    :returns: a new `Key` instance, whose path consists of all but the
              last element of self's path.  If self has only one path
              element, return None.
    """
    if len(self._path) <= 1:
        return None
    # path() acts as a setter here: it clones self with the shorter path.
    return self.path(self.path()[:-1])

def __repr__(self):  # pragma NO COVER
    """Render the key as ``<Key[path]>`` for debugging."""
    return '<Key{0}>'.format(self.path())
39 changes: 27 additions & 12 deletions gcloud/datastore/test_key.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,12 +80,20 @@ def test_from_protobuf_w_dataset_in_pb(self):
key = self._getTargetClass().from_protobuf(pb)
self.assertEqual(key.dataset().id(), _DATASET)

def test_from_protobuf_w_namespace_in_pb_wo_dataset_passed(self):
    """Resolves diff residue: the old test name and its commented-out
    assertion (issue #133) are superseded by this renamed test, which
    asserts the namespace from the protobuf is preserved when no
    dataset is passed.
    """
    _NAMESPACE = 'NAMESPACE'
    pb = self._makePB(namespace=_NAMESPACE)
    key = self._getTargetClass().from_protobuf(pb)
    self.assertEqual(key.namespace(), _NAMESPACE)

def test_from_protobuf_w_namespace_in_pb_w_dataset_passed(self):
    # When an explicit dataset is supplied, the namespace carried in the
    # protobuf is ignored: the resulting key has no namespace.
    from gcloud.datastore.dataset import Dataset
    _DATASET = 'DATASET'
    _NAMESPACE = 'NAMESPACE'
    pb = self._makePB(namespace=_NAMESPACE)
    explicit_dataset = Dataset(_DATASET)
    key = self._getTargetClass().from_protobuf(pb, explicit_dataset)
    self.assertEqual(key.namespace(), None)

def test_from_protobuf_w_path_in_pb(self):
_DATASET = 'DATASET'
Expand Down Expand Up @@ -164,12 +172,11 @@ def test_from_path_empty(self):
self.assertEqual(key.path(), [{'kind': ''}])

def test_from_path_single_element(self):
    """Resolves diff residue: the old permissive body (issue #134,
    'XXX s.b. abc?') is superseded by the contract that from_path
    requires an even number of positional args.
    """
    self.assertRaises(ValueError, self._getTargetClass().from_path, 'abc')

def test_from_path_three_elements(self):
    # An odd number of path args must be rejected (kind/id pairs only).
    factory = self._getTargetClass().from_path
    self.assertRaises(ValueError, factory, 'abc', 'def', 'ghi')

def test_from_path_two_elements_second_string(self):
key = self._getTargetClass().from_path('abc', 'def')
Expand Down Expand Up @@ -339,6 +346,14 @@ def test_id_or_name_w_name_only(self):
key = self._makeOne(path=_PATH)
self.assertEqual(key.id_or_name(), _NAME)

def _ugh(self):
    # NOTE(review): dead/broken fragment — neither `key` nor `_DATASET`
    # is defined in this scope, so calling this would raise NameError.
    # It looks like a stray remnant of a to_protobuf test (presumably left
    # behind by a merge) — confirm and delete.
    protokey = key.to_protobuf()
    self.assertEqual(protokey.partition_id.dataset_id, _DATASET)
def test_parent_default(self):
    # A default key has a single (empty) path element, so no parent.
    self.assertEqual(self._makeOne().parent(), None)

def test_parent_explicit_top_level(self):
    # A single-element path ('abc'/'def' is one kind/name pair) has no
    # parent.
    klass = self._getTargetClass()
    top_level = klass.from_path('abc', 'def')
    self.assertEqual(top_level.parent(), None)

def test_parent_explicit_nested(self):
    # The parent keeps every path element but the last.
    klass = self._getTargetClass()
    nested = klass.from_path('abc', 'def', 'ghi', 123)
    expected = [{'kind': 'abc', 'name': 'def'}]
    self.assertEqual(nested.parent().path(), expected)
60 changes: 57 additions & 3 deletions gcloud/storage/bucket.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import os

from gcloud.storage import exceptions
from gcloud.storage.acl import BucketACL
from gcloud.storage.acl import DefaultObjectACL
Expand Down Expand Up @@ -230,9 +232,56 @@ def upload_file(self, filename, key=None):
to the root of the bucket
with the same name as on your local file system.
"""
if key is None:
key = os.path.basename(filename)
key = self.new_key(key)
return key.set_contents_from_filename(filename)

def upload_file_object(self, fh, key=None):
    """Shortcut method to upload an open file object into this bucket.

    Use this method to quickly put a local file in Cloud Storage.

    For example::

      >>> from gcloud import storage
      >>> connection = storage.get_connection(project, email, key_path)
      >>> bucket = connection.get_bucket('my-bucket')
      >>> bucket.upload_file_object(open('~/my-file.txt'),
      ...                           'remote-text-file.txt')
      >>> print bucket.get_all_keys()
      [<Key: my-bucket, remote-text-file.txt>]

    If you don't provide a key value,
    we will try to upload the file using the local filename
    as the key
    (**not** the complete path)::

      >>> from gcloud import storage
      >>> connection = storage.get_connection(project, email, key_path)
      >>> bucket = connection.get_bucket('my-bucket')
      >>> bucket.upload_file_object(open('~/my-file.txt'))
      >>> print bucket.get_all_keys()
      [<Key: my-bucket, my-file.txt>]

    :type fh: file
    :param fh: A file handle open for reading.

    :type key: string or :class:`gcloud.storage.key.Key`
    :param key: The key (either an object or a remote path)
                of where to put the file.

                If this is blank,
                we will try to upload the file
                to the root of the bucket
                with the same name as on your local file system.
    """
    # TODO: What do we do about overwriting data?
    if key:
        key = self.new_key(key)
    else:
        # Fall back to the basename of the file handle's underlying
        # filename as the object name.
        key = self.new_key(os.path.basename(fh.name))
    return key.set_contents_from_file(fh)

def has_metadata(self, field=None):
"""Check if metadata is available locally.

Expand Down Expand Up @@ -443,7 +492,9 @@ def save_acl(self, acl=None):
if acl is None:
return self

return self.patch_metadata({'acl': list(acl)})
self.patch_metadata({'acl': list(acl)})
self.reload_acl()
return self

def clear_acl(self):
"""Remove all ACL rules from the bucket.
Expand Down Expand Up @@ -517,12 +568,15 @@ def save_default_object_acl(self, acl=None):
and save that.
"""

acl = acl or self.default_object_acl
if acl is None:
acl = self.default_object_acl

if acl is None:
return self

return self.patch_metadata({'defaultObjectAcl': list(acl)})
self.patch_metadata({'defaultObjectAcl': list(acl)})
self.reload_default_object_acl()
return self

def clear_default_object_acl(self):
"""Remove the Default Object ACL from this bucket."""
Expand Down
Loading