From 2da0fa788df191f53d14f5e12c608a24f31a1f98 Mon Sep 17 00:00:00 2001
From: Bill Prin
Date: Wed, 8 Jun 2016 14:35:27 -0700
Subject: [PATCH 1/2] Logging Docs Updates

---
 docs/logging-usage.rst | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)

diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst
index 6711bf207a0a..9b23e2edd8ab 100644
--- a/docs/logging-usage.rst
+++ b/docs/logging-usage.rst
@@ -65,8 +65,8 @@ Fetch entries for the default project.
    >>> entries, token = client.list_entries()  # API call
    >>> for entry in entries:
    ...    timestamp = entry.timestamp.isoformat()
-   ...    print('%sZ: %s | %s' %
-   ...        (timestamp, entry.text_payload, entry.struct_payload))
+   ...    print('%sZ: %s' %
+   ...        (timestamp, entry.payload))
    2016-02-17T20:35:49.031864072Z: A simple entry | None
    2016-02-17T20:38:15.944418531Z: None | {'message': 'My second entry', 'weather': 'partly cloudy'}
 
@@ -129,7 +129,7 @@ Delete all entries for a logger
    >>> from gcloud import logging
    >>> client = logging.Client()
    >>> logger = client.logger('log_name')
-   >>> logger.delete_entries()  # API call
+   >>> logger.delete()  # API call
 
 
 Manage log metrics
@@ -220,8 +220,8 @@ Create a Cloud Storage sink:
    >>> client = logging.Client()
    >>> sink = client.sink(
    ...     "robots-storage",
-   ...     filter='log:apache-access AND textPayload:robot')
-   >>> sink.storage_bucket = "my-bucket-name"
+   ...     'log:apache-access AND textPayload:robot',
+   ...     'storage.googleapis.com/my-bucket-name')
    >>> sink.exists()  # API call
    False
    >>> sink.create()  # API call
@@ -236,8 +236,8 @@ Create a BigQuery sink:
    >>> client = logging.Client()
    >>> sink = client.sink(
    ...     "robots-bq",
-   ...     filter='log:apache-access AND textPayload:robot')
-   >>> sink.bigquery_dataset = "projects/my-project/datasets/my-dataset"
+   ...     'log:apache-access AND textPayload:robot',
+   ...     'bigquery.googleapis.com/projects/my-project/datasets/my-dataset')
    >>> sink.exists()  # API call
    False
    >>> sink.create()  # API call
@@ -250,10 +250,11 @@ Create a Cloud Pub/Sub sink:
 
    >>> from gcloud import logging
    >>> client = logging.Client()
+
    >>> sink = client.sink(
    ...     "robots-pubsub",
-   ...     filter='log:apache-access AND textPayload:robot')
-   >>> sink.pubsub_topic = 'projects/my-project/topics/my-topic'
+   ...     'log:apache-access AND textPayload:robot',
+   ...     'pubsub.googleapis.com/projects/my-project/topics/my-topic')
    >>> sink.exists()  # API call
    False
    >>> sink.create()  # API call

From ea2a9076117093263aa9985fa11ce1bacb772205 Mon Sep 17 00:00:00 2001
From: Bill Prin
Date: Thu, 9 Jun 2016 11:14:52 -0700
Subject: [PATCH 2/2] Use equals for logger list_entries
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

logName in the v2 API expects the fully qualified name. The colon
operator means “contains”, so the old filter still matched (the fully
qualified name contains the short name), but such queries are not
indexed and can be extremely slow.
---
 gcloud/logging/logger.py      | 2 +-
 gcloud/logging/test_logger.py | 6 ++++--
 2 files changed, 5 insertions(+), 3 deletions(-)

diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py
index 913e32eb07a7..ad2d4b70ca77 100644
--- a/gcloud/logging/logger.py
+++ b/gcloud/logging/logger.py
@@ -298,7 +298,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None,
                   more entries can be retrieved with another call (pass that
                   value as ``page_token``).
""" - log_filter = 'logName:%s' % (self.name,) + log_filter = 'logName=%s' % (self.full_name,) if filter_ is not None: filter_ = '%s AND %s' % (filter_, log_filter) else: diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 1ddcbaa4f6f3..fd7938f65b26 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -348,7 +348,8 @@ def test_delete_w_alternate_client(self): def test_list_entries_defaults(self): LISTED = { 'projects': None, - 'filter_': 'logName:%s' % (self.LOGGER_NAME), + 'filter_': 'logName=projects/%s/logs/%s' % + (self.PROJECT, self.LOGGER_NAME), 'order_by': None, 'page_size': None, 'page_token': None, @@ -371,7 +372,8 @@ def test_list_entries_explicit(self): PAGE_SIZE = 42 LISTED = { 'projects': ['PROJECT1', 'PROJECT2'], - 'filter_': '%s AND logName:%s' % (FILTER, self.LOGGER_NAME), + 'filter_': '%s AND logName=projects/%s/logs/%s' % + (FILTER, self.PROJECT, self.LOGGER_NAME), 'order_by': DESCENDING, 'page_size': PAGE_SIZE, 'page_token': TOKEN,