Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

logger.list_entries optimization using equals and docs update #1848

Merged
merged 2 commits into from
Jun 10, 2016
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 10 additions & 9 deletions docs/logging-usage.rst
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,8 @@ Fetch entries for the default project.
>>> entries, token = client.list_entries() # API call
>>> for entry in entries:
... timestamp = entry.timestamp.isoformat()
... print('%sZ: %s | %s' %
... (timestamp, entry.text_payload, entry.struct_payload))
... print('%sZ: %s' %
... (timestamp, entry.payload))
2016-02-17T20:35:49.031864072Z: A simple entry
2016-02-17T20:38:15.944418531Z: {'message': 'My second entry', 'weather': 'partly cloudy'}

Expand Down Expand Up @@ -129,7 +129,7 @@ Delete all entries for a logger
>>> from gcloud import logging
>>> client = logging.Client()
>>> logger = client.logger('log_name')
>>> logger.delete_entries() # API call
>>> logger.delete() # API call


Manage log metrics
Expand Down Expand Up @@ -220,8 +220,8 @@ Create a Cloud Storage sink:
>>> client = logging.Client()
>>> sink = client.sink(
... "robots-storage",
... filter='log:apache-access AND textPayload:robot')
>>> sink.storage_bucket = "my-bucket-name"
... 'log:apache-access AND textPayload:robot',
... 'storage.googleapis.com/my-bucket-name')
>>> sink.exists() # API call
False
>>> sink.create() # API call
Expand All @@ -236,8 +236,8 @@ Create a BigQuery sink:
>>> client = logging.Client()
>>> sink = client.sink(
... "robots-bq",
... filter='log:apache-access AND textPayload:robot')
>>> sink.bigquery_dataset = "projects/my-project/datasets/my-dataset"
... 'log:apache-access AND textPayload:robot',
... 'bigquery.googleapis.com/projects/my-project/datasets/my-dataset')
>>> sink.exists() # API call
False
>>> sink.create() # API call
Expand All @@ -250,10 +250,11 @@ Create a Cloud Pub/Sub sink:

>>> from gcloud import logging
>>> client = logging.Client()

>>> sink = client.sink(
... "robots-pubsub",
... filter='log:apache-access AND textPayload:robot')
>>> sink.pubsub_topic = 'projects/my-project/topics/my-topic'
... 'log:apache-access AND textPayload:robot',
... 'pubsub.googleapis.com/projects/my-project/topics/my-topic')
>>> sink.exists() # API call
False
>>> sink.create() # API call
Expand Down
2 changes: 1 addition & 1 deletion gcloud/logging/logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -298,7 +298,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None,
more entries can be retrieved with another call (pass that
value as ``page_token``).
"""
log_filter = 'logName:%s' % (self.name,)
log_filter = 'logName=%s' % (self.full_name,)

This comment was marked as spam.

This comment was marked as spam.

This comment was marked as spam.

if filter_ is not None:
filter_ = '%s AND %s' % (filter_, log_filter)
else:
Expand Down
6 changes: 4 additions & 2 deletions gcloud/logging/test_logger.py
Original file line number Diff line number Diff line change
Expand Up @@ -348,7 +348,8 @@ def test_delete_w_alternate_client(self):
def test_list_entries_defaults(self):
LISTED = {
'projects': None,
'filter_': 'logName:%s' % (self.LOGGER_NAME),
'filter_': 'logName=projects/%s/logs/%s' %
(self.PROJECT, self.LOGGER_NAME),
'order_by': None,
'page_size': None,
'page_token': None,
Expand All @@ -371,7 +372,8 @@ def test_list_entries_explicit(self):
PAGE_SIZE = 42
LISTED = {
'projects': ['PROJECT1', 'PROJECT2'],
'filter_': '%s AND logName:%s' % (FILTER, self.LOGGER_NAME),
'filter_': '%s AND logName=projects/%s/logs/%s' %
(FILTER, self.PROJECT, self.LOGGER_NAME),
'order_by': DESCENDING,
'page_size': PAGE_SIZE,
'page_token': TOKEN,
Expand Down