diff --git a/gcloud/datastore/connection.py b/gcloud/datastore/connection.py
index a32229012d3c..13e299a16f93 100644
--- a/gcloud/datastore/connection.py
+++ b/gcloud/datastore/connection.py
@@ -322,8 +322,6 @@ def save_entity(self, dataset_id, key_pb, properties):
         :type properties: dict
         :param properties: The properties to store on the entity.
         """
-        # TODO: Is this the right method name?
-        # TODO: How do you delete properties? Set them to None?
         mutation = self.mutation()
 
         # If the Key is complete, we should upsert
@@ -381,13 +379,10 @@ def delete_entities(self, dataset_id, key_pbs):
             delete = mutation.delete.add()
             delete.CopyFrom(key_pb)
 
-        # TODO: Make this return value be a True/False (or something more useful).
         if self.transaction():
             return True
         else:
             return self.commit(dataset_id, mutation)
 
     def delete_entity(self, dataset_id, key_pb):
-        # TODO: Is this the right way to handle deleting
-        # (single and multiple as separate methods)?
         return self.delete_entities(dataset_id, [key_pb])
diff --git a/gcloud/datastore/entity.py b/gcloud/datastore/entity.py
index 3164c19e782e..28045f8d2157 100644
--- a/gcloud/datastore/entity.py
+++ b/gcloud/datastore/entity.py
@@ -166,7 +166,6 @@ def reload(self):
         # Note that you must have a valid key, otherwise this makes no sense.
         entity = self.dataset().get_entity(self.key().to_protobuf())
 
-        # TODO(jjg): Raise an error if something dumb happens.
         if entity:
             self.update(entity)
         return self
@@ -207,7 +206,6 @@ def delete(self):
             dataset_id=self.dataset().id(), key_pb=self.key().to_protobuf())
 
     def __repr__(self): #pragma NO COVER
-        # TODO: Make sure that this makes sense.
         # An entity should have a key all the time (even if it's partial).
         if self.key():
             return '<Entity%s %s>' % (self.key().path(), super(Entity, self).__repr__())
diff --git a/gcloud/datastore/helpers.py b/gcloud/datastore/helpers.py
index 23075381ff69..bde6cbafac66 100644
--- a/gcloud/datastore/helpers.py
+++ b/gcloud/datastore/helpers.py
@@ -97,5 +97,4 @@ def get_value_from_protobuf(pb):
         return pb.value.string_value
 
     else:
-        # TODO(jjg): Should we raise a ValueError here?
         return None
diff --git a/gcloud/datastore/key.py b/gcloud/datastore/key.py
index 8d40b396ca73..7f06daa0a416 100644
--- a/gcloud/datastore/key.py
+++ b/gcloud/datastore/key.py
@@ -6,7 +6,6 @@
 
 
 class Key(object):
-    # TODO: Determine if this really should be immutable.
     """
     An immutable representation of a datastore Key.
     """
diff --git a/gcloud/datastore/query.py b/gcloud/datastore/query.py
index 97b68a644b91..821ee4ba9533 100644
--- a/gcloud/datastore/query.py
+++ b/gcloud/datastore/query.py
@@ -6,8 +6,6 @@
 from gcloud.datastore.key import Key
 
 
-# TODO: Figure out how to properly handle namespaces.
-
 class Query(object):
     """A Query against the Cloud Datastore.
 
@@ -60,7 +58,6 @@ def __init__(self, kind=None, dataset=None):
             self._pb.kind.add().name = kind
 
     def _clone(self):
-        # TODO(jjg): Double check that this makes sense...
         clone = copy.deepcopy(self)
         clone._dataset = self._dataset  # Shallow copy the dataset.
         return clone
@@ -216,8 +213,6 @@ def kind(self, *kinds):
         If a kind is provided, returns a clone of the :class:`Query`
         with those kinds set.
         """
-        # TODO: Do we want this to be additive?
-        # If not, clear the _pb.kind attribute.
         if kinds:
             clone = self._clone()
             for kind in kinds:
diff --git a/gcloud/storage/__init__.py b/gcloud/storage/__init__.py
index f2e0b51f252b..b5337f28b333 100644
--- a/gcloud/storage/__init__.py
+++ b/gcloud/storage/__init__.py
@@ -31,7 +31,6 @@
 
 __version__ = '0.1'
 
-# TODO: Allow specific scopes and authorization levels.
 SCOPE = ('https://www.googleapis.com/auth/devstorage.full_control',
          'https://www.googleapis.com/auth/devstorage.read_only',
          'https://www.googleapis.com/auth/devstorage.read_write')
diff --git a/gcloud/storage/acl.py b/gcloud/storage/acl.py
index 100552431c3f..ecb1027541d4 100644
--- a/gcloud/storage/acl.py
+++ b/gcloud/storage/acl.py
@@ -101,7 +101,6 @@ def __init__(self, type, identifier=None):
           is optional.
         """
 
-        # TODO: Add validation of types.
         self.identifier = identifier
         self.roles = set([])
         self.type = type
diff --git a/gcloud/storage/bucket.py b/gcloud/storage/bucket.py
index bb77bc31c8ef..37691396db5e 100644
--- a/gcloud/storage/bucket.py
+++ b/gcloud/storage/bucket.py
@@ -149,13 +149,9 @@ def delete(self, force=False):
 
         :raises: :class:`gcloud.storage.exceptions.NotFoundError`
         """
-
-        # TODO: Make sure the proper exceptions are raised.
-
         return self.connection.delete_bucket(self.name, force=force)
 
     def delete_key(self, key):
-        # TODO: Should we accept a 'silent' param here to not raise an exception?
         """Deletes a key from the current bucket.
 
         If the key isn't found,
@@ -189,7 +185,6 @@ def delete_key(self, key):
         return key
 
     def delete_keys(self, keys):
-        # TODO: What should be the return value here?
         # NOTE: boto returns a MultiDeleteResult instance.
         for key in keys:
             self.delete_key(key)
@@ -198,7 +193,6 @@ def copy_key(self): #pragma NO COVER
         raise NotImplementedError
 
     def upload_file(self, filename, key=None):
-        # TODO: What do we do about overwriting data?
         """Shortcut method to upload a file into this bucket.
 
         Use this method to quickly put a local file in Cloud Storage.
@@ -392,7 +386,6 @@ def reload_acl(self):
         return self
 
     def get_acl(self):
-        # TODO: This might be a VERY long list. Use the specific API endpoint.
         """Get ACL metadata as a :class:`gcloud.storage.acl.BucketACL` object.
 
         :rtype: :class:`gcloud.storage.acl.BucketACL`
diff --git a/gcloud/storage/connection.py b/gcloud/storage/connection.py
index 294fe2ee5a22..4c336634c6dd 100644
--- a/gcloud/storage/connection.py
+++ b/gcloud/storage/connection.py
@@ -217,14 +217,12 @@ def api_request(self, method, path=None, query_params=None,
         response, content = self.make_request(
             method=method, url=url, data=data, content_type=content_type)
 
-        # TODO: Add better error handling.
         if response.status == 404:
             raise exceptions.NotFoundError(response, content)
         elif not 200 <= response.status < 300:
             raise exceptions.ConnectionError(response, content)
 
         if content and expect_json:
-            # TODO: Better checking on this header for JSON.
             content_type = response.get('content-type', '')
             if not content_type.startswith('application/json'):
                 raise TypeError('Expected JSON, got %s' % content_type)
@@ -282,8 +280,6 @@ def get_bucket(self, bucket_name, *args, **kwargs):
         :returns: The bucket matching the name provided.
         :raises: :class:`gcloud.storage.exceptions.NotFoundError`
         """
-
-        # TODO: URL-encode the bucket name to be safe?
         bucket = self.new_bucket(bucket_name)
         response = self.api_request(method='GET', path=bucket.path)
         return Bucket.from_dict(response, connection=self)
@@ -317,7 +313,6 @@ def lookup(self, bucket_name):
         return None
 
     def create_bucket(self, bucket, *args, **kwargs):
-        # TODO: Which exceptions will this raise?
         """Create a new bucket.
 
         For example::
diff --git a/gcloud/storage/exceptions.py b/gcloud/storage/exceptions.py
index dff01204bc95..1e5429d6d0b7 100644
--- a/gcloud/storage/exceptions.py
+++ b/gcloud/storage/exceptions.py
@@ -1,5 +1,3 @@
-# TODO: Make these super useful.
-
 class StorageError(Exception):
     pass
 
diff --git a/gcloud/storage/iterator.py b/gcloud/storage/iterator.py
index 2d69ce2efaf3..fab97e26e2d0 100644
--- a/gcloud/storage/iterator.py
+++ b/gcloud/storage/iterator.py
@@ -186,8 +186,6 @@ def get_items_from_response(self, response):
 
 
 class KeyDataIterator(object):
-    # TODO: Use an actual HTTP streaming library,
-    # not multiple requests and the range header.
     def __init__(self, key):
         self.key = key
 
diff --git a/gcloud/storage/key.py b/gcloud/storage/key.py
index 3e69686ff3ca..2022ac5eccf1 100644
--- a/gcloud/storage/key.py
+++ b/gcloud/storage/key.py
@@ -72,8 +72,6 @@ def connection(self):
         :returns: The connection to use, or None if no connection is set.
         """
 
-        # TODO: If a bucket isn't defined, this is basically useless.
-        # Where do we throw an error?
         if self.bucket and self.bucket.connection:
             return self.bucket.connection
 
@@ -166,9 +164,6 @@ def get_contents_to_filename(self, filename):
 
         :raises: :class:`gcloud.storage.exceptions.NotFoundError`
         """
-        # TODO: Add good error checking.
-        # TODO: Add good exception handling.
-        # TODO: Set timestamp? Make optional, default being to set it if possible?
         with open(filename, 'wb') as fh:
             self.get_contents_to_file(fh)
 
@@ -239,8 +234,6 @@ def set_contents_from_file(self, fh, rewind=False, size=None,
             'Content-Range': 'bytes %d-%d/%d' % (start, end, total_bytes),
         }
 
-        # TODO: Error checking for response code.
-        # TODO: Exponential backoff when errors come through.
         response, content = self.connection.make_request(content_type='text/plain',
             method='POST', url=upload_url, headers=headers, data=data)
 
@@ -279,7 +272,6 @@ def set_contents_from_string(self, data, content_type='text/plain'):
 
         :returns: The updated Key object.
         """
-        # TODO: How do we handle NotFoundErrors?
         string_buffer = StringIO()
         string_buffer.write(data)
         self.set_contents_from_file(fh=string_buffer, rewind=True,
@@ -385,7 +377,6 @@ def reload_acl(self):
         return self
 
     def get_acl(self):
-        # TODO: This might be a VERY long list. Use the specific API endpoint.
         """Get ACL metadata as a :class:`gcloud.storage.acl.ObjectACL` object.
 
         :rtype: :class:`gcloud.storage.acl.ObjectACL`
diff --git a/tox.ini b/tox.ini
index 87f793420597..4e49a7656372 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,10 +1,9 @@
 [tox]
-envlist = 
+envlist =
    py26,py27,cover,docs
-# TODO: add py32,py33,py34
 
 [testenv]
-commands = 
+commands =
    nosetests
 deps =
    nose
@@ -13,7 +12,7 @@ deps =
 
 [testenv:cover]
 basepython = python2.7
-commands = 
+commands =
    nosetests --with-xunit --with-xcoverage --cover-package=gcloud --nocapture --cover-erase
 deps =
    nose
@@ -24,7 +23,7 @@ deps =
 
 [testenv:docs]
 basepython = python2.7
-commands = 
+commands =
    sphinx-build -b html -d docs/_build/doctrees docs docs/_build/html
 # sphinx-build -b doctest -d docs/_build/doctrees docs docs/_build/doctest
 deps =