From f2becb075736f8faa8c5ed25b5d32fd65a29cb5a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 2 Mar 2016 15:09:14 -0500 Subject: [PATCH 01/68] Logging API usage docs. Squashed after review on PR #1488. --- docs/index.rst | 7 + docs/logging-usage.rst | 307 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 314 insertions(+) create mode 100644 docs/logging-usage.rst diff --git a/docs/index.rst b/docs/index.rst index 84afe9522fc8..312eac5c9777 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -105,6 +105,13 @@ search-index search-document +.. toctree:: + :maxdepth: 0 + :hidden: + :caption: Cloud Logging + + logging-usage + .. toctree:: :maxdepth: 0 :hidden: diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst new file mode 100644 index 000000000000..3f027c00d38d --- /dev/null +++ b/docs/logging-usage.rst @@ -0,0 +1,307 @@ +Using the API +============= + + +Authentication and Configuration +-------------------------------- + +- For an overview of authentication in ``gcloud-python``, + see :doc:`gcloud-auth`. + +- In addition to any authentication configuration, you should also set the + :envvar:`GCLOUD_PROJECT` environment variable for the project you'd like + to interact with. If you are Google App Engine or Google Compute Engine + this will be detected automatically. + +- After configuring your environment, create a + :class:`Client ` + + .. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + + or pass in ``credentials`` and ``project`` explicitly + + .. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client(project='my-project', credentials=creds) + + +Writing log entries +------------------- + +Write a simple text entry to a logger. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> logger = client.logger('log_name') + >>> logger.log_text("A simple entry") # API call + +Write a dictionary entry to a logger. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> logger = client.logger('log_name') + >>> logger.log_struct( + ... message="My second entry", + ... weather="partly cloudy") # API call + + +Retrieving log entries +---------------------- + +Fetch entries for the default project. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> entries, token = client.list_entries() # API call + >>> for entry in entries: + ... timestamp = entry.timestamp.isoformat() + ... print('%sZ: %s | %s' % + ... (timestamp, entry.text_payload, entry.struct_payload)) + 2016-02-17T20:35:49.031864072Z: A simple entry | None + 2016-02-17T20:38:15.944418531Z: None | {'message': 'My second entry', 'weather': 'partly cloudy'} + +Fetch entries across multiple projects. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> entries, token = client.list_entries( + ... project_ids=['one-project', 'another-project']) # API call + +Filter entries retrieved using the `Advanced Logs Filters`_ syntax + +.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> FILTER = "log:log_name AND textPayload:simple" + >>> entries, token = client.list_entries(filter=FILTER) # API call + +Sort entries in descending timestamp order. + +.. 
doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> entries, token = client.list_entries(order_by=logging.DESCENDING) # API call + +Retrieve entities in batches of 10, iterating until done. + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> retrieved = [] + >>> token = None + >>> while True: + ... entries, token = client.list_entries(page_size=10, page_token=token) # API call + ... retrieved.extend(entries) + ... if token is None: + ... break + + +Delete all entries for a logger +------------------------------- + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> logger = client.logger('log_name') + >>> logger.delete_entries() # API call + + +Manage log metrics +------------------ + +Metrics are counters of entries which match a given filter. They can be +used within Cloud Monitoring to create charts and alerts. + +Create a metric: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metric = client.metric( + ... "robots", "Robots all up in your server", + ... filter='log:apache-access AND textPayload:robot') + >>> metric.exists() # API call + False + >>> metric.create() # API call + >>> metric.exists() # API call + True + +List all metrics for a project: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metrics, token = client.list_metrics() + >>> len(metrics) + 1 + >>> metric = metrics[0] + >>> metric.name + "robots" + +Refresh local information about a metric: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metric = client.metric("robots") + >>> metric.reload() # API call + >>> metric.description + "Robots all up in your server" + >>> metric.filter + "log:apache-access AND textPayload:robot" + +Update a metric: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metric = client.metric("robots") + >>> metric.exists() # API call + True + >>> metric.reload() # API call + >>> metric.description = "Danger, Will Robinson!" + >>> metric.update() # API call + +Delete a metric: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> metric = client.metric("robots") + >>> metric.exists() # API call + True + >>> metric.delete() # API call + >>> metric.exists() # API call + False + + +Export log entries using sinks +------------------------------ + +Sinks allow exporting entries which match a given filter to Cloud Storage +buckets, BigQuery datasets, or Cloud Pub/Sub topics. + +Create a Cloud Storage sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink( + ... "robots-storage", + ... filter='log:apache-access AND textPayload:robot') + >>> sink.storage_bucket = "my-bucket-name" + >>> sink.exists() # API call + False + >>> sink.create() # API call + >>> sink.exists() # API call + True + +Create a BigQuery sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink( + ... "robots-bq", + ... filter='log:apache-access AND textPayload:robot') + >>> sink.bigquery_dataset = "projects/my-project/datasets/my-dataset" + >>> sink.exists() # API call + False + >>> sink.create() # API call + >>> sink.exists() # API call + True + +Create a Cloud Pub/Sub sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink( + ... "robots-pubsub", + ... 
filter='log:apache-access AND textPayload:robot') + >>> sink.pubsub_topic = 'projects/my-project/topics/my-topic' + >>> sink.exists() # API call + False + >>> sink.create() # API call + >>> sink.exists() # API call + True + +List all sinks for a project: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sinks, token = client.list_sinks() + >>> for sink in sinks: + ... print('%s: %s' % (sink.name, sink.destination)) + robots-storage: storage.googleapis.com/my-bucket-name + robots-bq: bigquery.googleapis.com/projects/my-project/datasets/my-dataset + robots-pubsub: pubsub.googleapis.com/projects/my-project/topics/my-topic + +Refresh local information about a sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink('robots-storage') + >>> sink.filter is None + True + >>> sink.reload() # API call + >>> sink.filter + 'log:apache-access AND textPayload:robot' + >>> sink.destination + 'storage.googleapis.com/my-bucket-name' + +Update a sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink("robots") + >>> sink.reload() # API call + >>> sink.filter = "log:apache-access" + >>> sink.update() # API call + +Delete a sink: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> sink = client.sink( + ... "robots", + ... filter='log:apache-access AND textPayload:robot') + >>> sink.exists() # API call + True + >>> sink.delete() # API call + >>> sink.exists() # API call + False From 57e8424d456ce9b348086e43ce0b8c4418560361 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 2 Mar 2016 15:46:32 -0500 Subject: [PATCH 02/68] Add logging connection and client. --- gcloud/logging/__init__.py | 22 ++++++++ gcloud/logging/client.py | 43 ++++++++++++++++ gcloud/logging/connection.py | 85 +++++++++++++++++++++++++++++++ gcloud/logging/test_client.py | 43 ++++++++++++++++ gcloud/logging/test_connection.py | 71 ++++++++++++++++++++++++++ 5 files changed, 264 insertions(+) create mode 100644 gcloud/logging/__init__.py create mode 100644 gcloud/logging/client.py create mode 100644 gcloud/logging/connection.py create mode 100644 gcloud/logging/test_client.py create mode 100644 gcloud/logging/test_connection.py diff --git a/gcloud/logging/__init__.py b/gcloud/logging/__init__.py new file mode 100644 index 000000000000..1c23d7a11804 --- /dev/null +++ b/gcloud/logging/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Logging API wrapper. +""" + +from gcloud.logging.client import Client +from gcloud.logging.connection import Connection + + +SCOPE = Connection.SCOPE diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py new file mode 100644 index 000000000000..8b0c6bb9c4b7 --- /dev/null +++ b/gcloud/logging/client.py @@ -0,0 +1,43 @@ +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for interacting with the Google Cloud Pub/Sub API.""" + + +from gcloud.client import JSONClient +from gcloud.logging.connection import Connection + + +class Client(JSONClient): + """Client to bundle configuration needed for API requests. + + :type project: string + :param project: the project which the client acts on behalf of. Will be + passed when creating a topic. If not passed, + falls back to the default inferred from the environment. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection diff --git a/gcloud/logging/connection.py b/gcloud/logging/connection.py new file mode 100644 index 000000000000..4d34ceaae14f --- /dev/null +++ b/gcloud/logging/connection.py @@ -0,0 +1,85 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with gcloud logging connections.""" + +from gcloud import connection as base_connection + + +class Connection(base_connection.JSONConnection): + """A connection to Google Cloud Pubsub via the JSON REST API. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: (Optional) HTTP object to make requests. + + :type api_base_url: string + :param api_base_url: The base of the API call URL. Defaults to the value + :attr:`Connection.API_BASE_URL`. 
+ """ + + API_BASE_URL = 'https://logging.googleapis.com' + """The base of the API call URL.""" + + API_VERSION = 'v2beta1' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/logging.read', + 'https://www.googleapis.com/auth/logging.write', + 'https://www.googleapis.com/auth/logging.admin', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" + + def __init__(self, credentials=None, http=None, api_base_url=None): + super(Connection, self).__init__(credentials=credentials, http=http) + if api_base_url is None: + api_base_url = self.__class__.API_BASE_URL + self.api_base_url = api_base_url + + def build_api_url(self, path, query_params=None, + api_base_url=None, api_version=None): + """Construct an API url given a few components, some optional. + + Typically, you shouldn't need to use this method. + + :type path: string + :param path: The path to the resource. + + :type query_params: dict + :param query_params: A dictionary of keys and values to insert into + the query string of the URL. + + :type api_base_url: string + :param api_base_url: The base URL for the API endpoint. + Typically you won't have to provide this. + + :type api_version: string + :param api_version: The version of the API to call. + Typically you shouldn't provide this and instead + use the default for the library. + + :rtype: string + :returns: The URL assembled from the pieces provided. + """ + if api_base_url is None: + api_base_url = self.api_base_url + return super(Connection, self.__class__).build_api_url( + path, query_params=query_params, + api_base_url=api_base_url, api_version=api_version) diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py new file mode 100644 index 000000000000..c21b9f935cea --- /dev/null +++ b/gcloud/logging/test_client.py @@ -0,0 +1,43 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestClient(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.logging.client import Client + return Client + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + PROJECT = 'PROJECT' + CREDS = _Credentials() + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self diff --git a/gcloud/logging/test_connection.py b/gcloud/logging/test_connection.py new file mode 100644 index 000000000000..03dcb09cf91e --- /dev/null +++ b/gcloud/logging/test_connection.py @@ -0,0 +1,71 @@ +# Copyright 2015 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestConnection(unittest2.TestCase): + + def _getTargetClass(self): + from gcloud.logging.connection import Connection + return Connection + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_default_url(self): + conn = self._makeOne() + klass = self._getTargetClass() + self.assertEqual(conn.api_base_url, klass.API_BASE_URL) + + def test_custom_url_from_constructor(self): + HOST = object() + conn = self._makeOne(api_base_url=HOST) + + klass = self._getTargetClass() + self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) + self.assertEqual(conn.api_base_url, HOST) + + def test_build_api_url_no_extra_query_params(self): + conn = self._makeOne() + URI = '/'.join([ + conn.API_BASE_URL, + conn.API_VERSION, + 'foo', + ]) + self.assertEqual(conn.build_api_url('/foo'), URI) + + def test_build_api_url_w_extra_query_params(self): + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + conn = self._makeOne() + uri = conn.build_api_url('/foo', {'bar': 'baz'}) + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) + self.assertEqual(path, + '/'.join(['', conn.API_VERSION, 'foo'])) + parms = dict(parse_qsl(qs)) + self.assertEqual(parms['bar'], 'baz') + + def test_build_api_url_w_base_url_override(self): + base_url1 = 'api-base-url1' + base_url2 = 'api-base-url2' + conn = self._makeOne(api_base_url=base_url1) + URI = '/'.join([ + base_url2, + conn.API_VERSION, + 'foo', + ]) + self.assertEqual( + conn.build_api_url('/foo', api_base_url=base_url2), URI) From 139519a925bb8616497dd39620da19e8da3405cb Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 2 Mar 2016 17:53:20 -0500 Subject: [PATCH 03/68] Clean up copy-pasta: - Copyright year -> 2016. - Pubsub -> Logging. - Other formatting issues from @dhermes' review. --- gcloud/logging/__init__.py | 5 ++--- gcloud/logging/client.py | 10 +++++----- gcloud/logging/connection.py | 6 +++--- gcloud/logging/test_client.py | 2 +- gcloud/logging/test_connection.py | 2 +- 5 files changed, 12 insertions(+), 13 deletions(-) diff --git a/gcloud/logging/__init__.py b/gcloud/logging/__init__.py index 1c23d7a11804..2a4ab1ab7806 100644 --- a/gcloud/logging/__init__.py +++ b/gcloud/logging/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Google Cloud Logging API wrapper. 
-""" +"""Google Cloud Logging API wrapper.""" from gcloud.logging.client import Client from gcloud.logging.connection import Connection diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 8b0c6bb9c4b7..beaf9ed2217e 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Client for interacting with the Google Cloud Pub/Sub API.""" +"""Client for interacting with the Google Cloud Logging API.""" from gcloud.client import JSONClient @@ -23,9 +23,9 @@ class Client(JSONClient): """Client to bundle configuration needed for API requests. :type project: string - :param project: the project which the client acts on behalf of. Will be - passed when creating a topic. If not passed, - falls back to the default inferred from the environment. + :param project: the project which the client acts on behalf of. + If not passed, falls back to the default inferred + from the environment. :type credentials: :class:`oauth2client.client.OAuth2Credentials` or :class:`NoneType` diff --git a/gcloud/logging/connection.py b/gcloud/logging/connection.py index 4d34ceaae14f..9da66008f102 100644 --- a/gcloud/logging/connection.py +++ b/gcloud/logging/connection.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,7 +18,7 @@ class Connection(base_connection.JSONConnection): - """A connection to Google Cloud Pubsub via the JSON REST API. + """A connection to Google Cloud Logging via the JSON REST API. :type credentials: :class:`oauth2client.client.OAuth2Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this @@ -45,7 +45,7 @@ class Connection(base_connection.JSONConnection): 'https://www.googleapis.com/auth/logging.write', 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/cloud-platform') - """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" + """The scopes required for authenticating as a Cloud Logging consumer.""" def __init__(self, credentials=None, http=None, api_base_url=None): super(Connection, self).__init__(credentials=credentials, http=http) diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index c21b9f935cea..e3d13d5e859b 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/gcloud/logging/test_connection.py b/gcloud/logging/test_connection.py index 03dcb09cf91e..80239addecb2 100644 --- a/gcloud/logging/test_connection.py +++ b/gcloud/logging/test_connection.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 08d9565f33137a31fd24e535a33f2b0cae11fb96 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 2 Mar 2016 18:03:06 -0500 Subject: [PATCH 04/68] Drop redundant method overrides, left over from Pub/Sub. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1560#discussion_r54805779 https://github.com/GoogleCloudPlatform/gcloud-python/pull/1560#discussion_r54805791 --- gcloud/logging/connection.py | 37 --------------------------- gcloud/logging/test_connection.py | 42 ------------------------------- 2 files changed, 79 deletions(-) diff --git a/gcloud/logging/connection.py b/gcloud/logging/connection.py index 9da66008f102..1c330a28529e 100644 --- a/gcloud/logging/connection.py +++ b/gcloud/logging/connection.py @@ -46,40 +46,3 @@ class Connection(base_connection.JSONConnection): 'https://www.googleapis.com/auth/logging.admin', 'https://www.googleapis.com/auth/cloud-platform') """The scopes required for authenticating as a Cloud Logging consumer.""" - - def __init__(self, credentials=None, http=None, api_base_url=None): - super(Connection, self).__init__(credentials=credentials, http=http) - if api_base_url is None: - api_base_url = self.__class__.API_BASE_URL - self.api_base_url = api_base_url - - def build_api_url(self, path, query_params=None, - api_base_url=None, api_version=None): - """Construct an API url given a few components, some optional. - - Typically, you shouldn't need to use this method. - - :type path: string - :param path: The path to the resource. - - :type query_params: dict - :param query_params: A dictionary of keys and values to insert into - the query string of the URL. - - :type api_base_url: string - :param api_base_url: The base URL for the API endpoint. - Typically you won't have to provide this. - - :type api_version: string - :param api_version: The version of the API to call. - Typically you shouldn't provide this and instead - use the default for the library. - - :rtype: string - :returns: The URL assembled from the pieces provided. 
- """ - if api_base_url is None: - api_base_url = self.api_base_url - return super(Connection, self.__class__).build_api_url( - path, query_params=query_params, - api_base_url=api_base_url, api_version=api_version) diff --git a/gcloud/logging/test_connection.py b/gcloud/logging/test_connection.py index 80239addecb2..0562be5df870 100644 --- a/gcloud/logging/test_connection.py +++ b/gcloud/logging/test_connection.py @@ -27,45 +27,3 @@ def _makeOne(self, *args, **kw): def test_default_url(self): conn = self._makeOne() klass = self._getTargetClass() - self.assertEqual(conn.api_base_url, klass.API_BASE_URL) - - def test_custom_url_from_constructor(self): - HOST = object() - conn = self._makeOne(api_base_url=HOST) - - klass = self._getTargetClass() - self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) - self.assertEqual(conn.api_base_url, HOST) - - def test_build_api_url_no_extra_query_params(self): - conn = self._makeOne() - URI = '/'.join([ - conn.API_BASE_URL, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo'), URI) - - def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - conn = self._makeOne() - uri = conn.build_api_url('/foo', {'bar': 'baz'}) - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - self.assertEqual(path, - '/'.join(['', conn.API_VERSION, 'foo'])) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') - - def test_build_api_url_w_base_url_override(self): - base_url1 = 'api-base-url1' - base_url2 = 'api-base-url2' - conn = self._makeOne(api_base_url=base_url1) - URI = '/'.join([ - base_url2, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual( - conn.build_api_url('/foo', api_base_url=base_url2), URI) From 586113f9b6f78a78e6b3d2653184a4d6189cb734 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 2 Mar 2016 18:12:02 -0500 Subject: [PATCH 05/68] Add assertions to placeholder testcase methods. 
--- gcloud/logging/test_client.py | 5 +++-- gcloud/logging/test_connection.py | 17 ++++++++++++++++- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index e3d13d5e859b..d5c17fcfa207 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -26,8 +26,9 @@ def _makeOne(self, *args, **kw): def test_ctor(self): PROJECT = 'PROJECT' - CREDS = _Credentials() - CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + creds = _Credentials() + client = self._makeOne(project=PROJECT, credentials=creds) + self.assertEqual(client.project, PROJECT) class _Credentials(object): diff --git a/gcloud/logging/test_connection.py b/gcloud/logging/test_connection.py index 0562be5df870..2939b683305e 100644 --- a/gcloud/logging/test_connection.py +++ b/gcloud/logging/test_connection.py @@ -25,5 +25,20 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_default_url(self): - conn = self._makeOne() + creds = _Credentials() + conn = self._makeOne(creds) klass = self._getTargetClass() + self.assertEqual(conn.credentials._scopes, klass.SCOPE) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self From 44f95583b062cea5a90d56f2557e441562760b9a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 3 Mar 2016 11:24:48 -0500 Subject: [PATCH 06/68] Fix docs coverage for logging. --- docs/index.rst | 1 + docs/logging-client.rst | 16 ++++++++++++++++ scripts/verify_included_modules.py | 1 + 3 files changed, 18 insertions(+) create mode 100644 docs/logging-client.rst diff --git a/docs/index.rst b/docs/index.rst index 312eac5c9777..f330bf4f9c31 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -111,6 +111,7 @@ :caption: Cloud Logging logging-usage + Client .. toctree:: :maxdepth: 0 diff --git a/docs/logging-client.rst b/docs/logging-client.rst new file mode 100644 index 000000000000..528414e1a2e3 --- /dev/null +++ b/docs/logging-client.rst @@ -0,0 +1,16 @@ +Logging Client +============== + +.. automodule:: gcloud.logging.client + :members: + :undoc-members: + :show-inheritance: + +Connection +~~~~~~~~~~ + +.. automodule:: gcloud.logging.connection + :members: + :undoc-members: + :show-inheritance: + diff --git a/scripts/verify_included_modules.py b/scripts/verify_included_modules.py index eb1a6f3571fe..41bb658ebe23 100644 --- a/scripts/verify_included_modules.py +++ b/scripts/verify_included_modules.py @@ -36,6 +36,7 @@ 'gcloud.datastore.__init__', 'gcloud.dns.__init__', 'gcloud.iterator', + 'gcloud.logging.__init__', 'gcloud.pubsub.__init__', 'gcloud.resource_manager.__init__', 'gcloud.search.__init__', From b5798fd4e468e1bf539688cd370b0bec6e6d2747 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 3 Mar 2016 12:21:52 -0500 Subject: [PATCH 07/68] Add 'Logger' class and factory. 
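
Intended usage of the new factory, in the style of the usage docs (``creds``
stands in for real OAuth2 credentials; the project and logger names are
illustrative):

    >>> from gcloud import logging
    >>> client = logging.Client(project='my-project', credentials=creds)
    >>> logger = client.logger('log_name')
    >>> logger.name
    'log_name'
    >>> logger.client is client     # logger stays bound to the client
    True
    >>> logger.project              # project is inherited from the client
    'my-project'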
--- docs/index.rst | 1 + docs/logging-logger.rst | 8 ++++++ gcloud/logging/client.py | 12 +++++++++ gcloud/logging/logger.py | 43 ++++++++++++++++++++++++++++++ gcloud/logging/test_client.py | 17 +++++++++--- gcloud/logging/test_logger.py | 50 +++++++++++++++++++++++++++++++++++ 6 files changed, 128 insertions(+), 3 deletions(-) create mode 100644 docs/logging-logger.rst create mode 100644 gcloud/logging/logger.py create mode 100644 gcloud/logging/test_logger.py diff --git a/docs/index.rst b/docs/index.rst index f330bf4f9c31..5dd66181f949 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -112,6 +112,7 @@ logging-usage Client + logging-logger .. toctree:: :maxdepth: 0 diff --git a/docs/logging-logger.rst b/docs/logging-logger.rst new file mode 100644 index 000000000000..8deb9b434534 --- /dev/null +++ b/docs/logging-logger.rst @@ -0,0 +1,8 @@ +Logger +====== + +.. automodule:: gcloud.logging.logger + :members: + :undoc-members: + :show-inheritance: + diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index beaf9ed2217e..4499fcdf9c4e 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -17,6 +17,7 @@ from gcloud.client import JSONClient from gcloud.logging.connection import Connection +from gcloud.logging.logger import Logger class Client(JSONClient): @@ -41,3 +42,14 @@ class Client(JSONClient): """ _connection_class = Connection + + def logger(self, name): + """Creates a logger bound to the current client. + + :type name: string + :param name: the name of the logger to be constructed. + + :rtype: :class:`gcloud.pubsub.logger.Logger` + :returns: Logger created with the current client. + """ + return Logger(name, client=self) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py new file mode 100644 index 000000000000..2463b9c625e1 --- /dev/null +++ b/gcloud/logging/logger.py @@ -0,0 +1,43 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Loggers.""" + + +class Logger(object): + """Loggers represent named targets for log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs + + :type name: string + :param name: the name of the logger + + :type client: :class:`gcloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the logger (which requires a project). 
+ """ + def __init__(self, name, client): + self.name = name + self._client = client + + @property + def client(self): + """Clent bound to the logger.""" + return self._client + + @property + def project(self): + """Project bound to the logger.""" + return self._client.project diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index d5c17fcfa207..707b3a381209 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -17,6 +17,9 @@ class TestClient(unittest2.TestCase): + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + def _getTargetClass(self): from gcloud.logging.client import Client return Client @@ -25,10 +28,18 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_ctor(self): - PROJECT = 'PROJECT' creds = _Credentials() - client = self._makeOne(project=PROJECT, credentials=creds) - self.assertEqual(client.project, PROJECT) + client = self._makeOne(project=self.PROJECT, credentials=creds) + self.assertEqual(client.project, self.PROJECT) + + def test_logger(self): + creds = _Credentials() + + client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + logger = client_obj.logger(self.LOGGER_NAME) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client_obj) + self.assertEqual(logger.project, self.PROJECT) class _Credentials(object): diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py new file mode 100644 index 000000000000..e3d8107d882c --- /dev/null +++ b/gcloud/logging/test_logger.py @@ -0,0 +1,50 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestLogger(unittest2.TestCase): + + PROJECT = 'test-project' + LOGGER_NAME = 'logger-name' + + def _getTargetClass(self): + from gcloud.logging.logger import Logger + return Logger + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + conn = _Connection() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + +class _Client(object): + + def __init__(self, project, connection=None): + self.project = project + self.connection = connection From d38fd237e9cf8b46bcf46cf9dfb9e1c4d42779ba Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 3 Mar 2016 12:54:02 -0500 Subject: [PATCH 08/68] Fix copyright year. 
Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1564#discussion_r54917458 --- gcloud/logging/logger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 2463b9c625e1..9d76fa6ac5e7 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From b4668932f028dea4b9bb27d8dcdeae6a3962f701 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 3 Mar 2016 14:38:47 -0500 Subject: [PATCH 09/68] Add 'Logger.log_text' method. --- gcloud/logging/logger.py | 46 +++++++++++++++++++++++++++++++ gcloud/logging/test_logger.py | 52 +++++++++++++++++++++++++++++++++++ 2 files changed, 98 insertions(+) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 9d76fa6ac5e7..7d5959347d7c 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -41,3 +41,49 @@ def client(self): def project(self): """Project bound to the logger.""" return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in logging APIs""" + return 'projects/%s/logs/%s' % (self.project, self.name) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def log_text(self, text, client=None): + """API call: log a text message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type text: text + :param text: the log message. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. 
+ """ + client = self._require_client(client) + + data = { + 'entries': [{ + 'logName': self.full_name, + 'textPayload': text, + 'resource': { + 'type': 'global', + }, + }], + } + client.connection.api_request( + method='POST', path='/entries:write', data=data) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index e3d8107d882c..fcac20719c87 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -34,6 +34,53 @@ def test_ctor(self): self.assertEqual(logger.name, self.LOGGER_NAME) self.assertTrue(logger.client is client) self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(logger.full_name, 'projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + + def test_log_text_w_str_implicit_client(self): + TEXT = 'TEXT' + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_text(TEXT) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_text_w_unicode_explicit_client(self): + TEXT = u'TEXT' + conn = _Connection({}) + client1 = _Client(self.PROJECT, object()) + client2 = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client1) + logger.log_text(TEXT, client=client2) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) class _Connection(object): @@ -42,6 +89,11 @@ def __init__(self, *responses): self._responses = responses self._requested = [] + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response + class _Client(object): From cb3772dc2c6e322ec02601d97156eebf3c1cf135 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Mar 2016 13:17:40 -0500 Subject: [PATCH 10/68] Fix copy-pasta in 'Client.logger' docstring. --- gcloud/logging/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 4499fcdf9c4e..6be5a7835aba 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -49,7 +49,7 @@ def logger(self, name): :type name: string :param name: the name of the logger to be constructed. - :rtype: :class:`gcloud.pubsub.logger.Logger` + :rtype: :class:`gcloud.logging.logger.Logger` :returns: Logger created with the current client. """ return Logger(name, client=self) From 9ec6addc16f81d5eb35db311bfb61aceabf22675 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 3 Mar 2016 15:37:06 -0500 Subject: [PATCH 11/68] Add 'Logger.log_struct' method. 
--- gcloud/logging/logger.py | 27 +++++++++++++++++++++ gcloud/logging/test_logger.py | 45 +++++++++++++++++++++++++++++++++++ 2 files changed, 72 insertions(+) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 7d5959347d7c..8f280f550b26 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -87,3 +87,30 @@ def log_text(self, text, client=None): } client.connection.api_request( method='POST', path='/entries:write', data=data) + + def log_struct(self, info, client=None): + """API call: log a text message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type info: dict + :param info: the log entry information + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + """ + client = self._require_client(client) + + data = { + 'entries': [{ + 'logName': self.full_name, + 'jsonPayload': info, + 'resource': { + 'type': 'global', + }, + }], + } + client.connection.api_request( + method='POST', path='/entries:write', data=data) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index fcac20719c87..317e48a8e70c 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -82,6 +82,51 @@ def test_log_text_w_unicode_explicit_client(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) + def test_log_struct_w_implicit_client(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_struct(STRUCT) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_struct_w_explicit_client(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + conn = _Connection({}) + client1 = _Client(self.PROJECT, object()) + client2 = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client1) + logger.log_struct(STRUCT, client=client2) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + class _Connection(object): From b0ad3e08d01751dee5543bf026b822e30f154c85 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 7 Mar 2016 17:22:00 -0500 Subject: [PATCH 12/68] Add 'Logger.delete' method. 
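
Illustrative usage, assuming the same client setup as the usage docs; note
that this issues a DELETE against the logger's full path and removes all
entries for that logger:

    >>> from gcloud import logging
    >>> client = logging.Client()
    >>> logger = client.logger('log_name')
    >>> logger.delete()  # API call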
--- gcloud/logging/logger.py | 14 ++++++++++++++ gcloud/logging/test_logger.py | 25 +++++++++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 8f280f550b26..ceeb356ede73 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -114,3 +114,17 @@ def log_struct(self, info, client=None): } client.connection.api_request( method='POST', path='/entries:write', data=data) + + def delete(self, client=None): + """API call: delete all entries in a logger via a DELETE request + + See: + https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/delete + + :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + """ + client = self._require_client(client) + client.connection.api_request( + method='DELETE', path='/%s' % self.full_name) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 317e48a8e70c..d5f1c9a56b9a 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -127,6 +127,31 @@ def test_log_struct_w_explicit_client(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) + def test_delete_w_bound_client(self): + PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + logger = self._makeOne(self.LOGGER_NAME, client=CLIENT) + logger.delete() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + + def test_delete_w_alternate_client(self): + PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + conn1 = _Connection({}) + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + logger = self._makeOne(self.LOGGER_NAME, client=CLIENT1) + logger.delete(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % PATH) + class _Connection(object): From d7a2e729fd069998722fc7a82ed9c1594e5c6340 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 3 Mar 2016 17:29:20 -0500 Subject: [PATCH 13/68] Add 'Client.list_entries'. --- docs/index.rst | 1 + docs/logging-entries.rst | 8 ++ gcloud/_helpers.py | 38 ++++- gcloud/logging/__init__.py | 2 + gcloud/logging/_helpers.py | 46 ++++++ gcloud/logging/client.py | 81 +++++++++++ gcloud/logging/entries.py | 93 ++++++++++++ gcloud/logging/test__helpers.py | 36 +++++ gcloud/logging/test_client.py | 129 ++++++++++++++++- gcloud/logging/test_entries.py | 243 ++++++++++++++++++++++++++++++++ gcloud/test__helpers.py | 86 ++++++++++- 11 files changed, 758 insertions(+), 5 deletions(-) create mode 100644 docs/logging-entries.rst create mode 100644 gcloud/logging/_helpers.py create mode 100644 gcloud/logging/entries.py create mode 100644 gcloud/logging/test__helpers.py create mode 100644 gcloud/logging/test_entries.py diff --git a/docs/index.rst b/docs/index.rst index 5dd66181f949..e2edfb05c7e8 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -113,6 +113,7 @@ logging-usage Client logging-logger + logging-entries .. 
toctree:: :maxdepth: 0 diff --git a/docs/logging-entries.rst b/docs/logging-entries.rst new file mode 100644 index 000000000000..a7b96721d30b --- /dev/null +++ b/docs/logging-entries.rst @@ -0,0 +1,8 @@ +Entries +======= + +.. automodule:: gcloud.logging.entries + :members: + :undoc-members: + :show-inheritance: + diff --git a/gcloud/_helpers.py b/gcloud/_helpers.py index dd85a54e405f..7b91c00271da 100644 --- a/gcloud/_helpers.py +++ b/gcloud/_helpers.py @@ -38,6 +38,16 @@ _NOW = datetime.datetime.utcnow # To be replaced by tests. _RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' +_RFC3339_NO_FRACTION = '%Y-%m-%dT%H:%M:%S' +# datetime.strptime cannot handle nanosecond precision: parse w/ regex +_RFC3339_NANOS = re.compile(r""" + (?P + \d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2} # YYYY-MM-DDTHH:MM:SS + ) + \. # decimal point + (?P\d{9}) # nanoseconds + Z # Zulu +""", re.VERBOSE) class _LocalStack(Local): @@ -301,7 +311,7 @@ def _total_seconds(offset): def _rfc3339_to_datetime(dt_str): - """Convert a string to a native timestamp. + """Convert a microsecond-precision timetamp to a native datetime. :type dt_str: str :param dt_str: The string to convert. @@ -313,6 +323,32 @@ def _rfc3339_to_datetime(dt_str): dt_str, _RFC3339_MICROS).replace(tzinfo=UTC) +def _rfc3339_nanos_to_datetime(dt_str): + """Convert a nanosecond-precision timestamp to a native datetime. + + .. note:: + + Python datetimes do not support nanosecond precision; this function + therefore truncates such values to microseconds. + + :type dt_str: str + :param dt_str: The string to convert. + + :rtype: :class:`datetime.datetime` + :returns: The datetime object created from the string. + """ + with_nanos = _RFC3339_NANOS.match(dt_str) + if with_nanos is None: + raise ValueError( + 'Timestamp: %r, does not match pattern: %r' % ( + dt_str, _RFC3339_NANOS.pattern)) + bare_seconds = datetime.datetime.strptime( + with_nanos.group('no_fraction'), _RFC3339_NO_FRACTION) + nanos = int(with_nanos.group('nanos')) + micros = nanos // 1000 + return bare_seconds.replace(microsecond=micros, tzinfo=UTC) + + def _datetime_to_rfc3339(value): """Convert a native timestamp to a string. diff --git a/gcloud/logging/__init__.py b/gcloud/logging/__init__.py index 2a4ab1ab7806..67b0386329e9 100644 --- a/gcloud/logging/__init__.py +++ b/gcloud/logging/__init__.py @@ -19,3 +19,5 @@ SCOPE = Connection.SCOPE +ASCENDING = 'timestamp asc' +DESCENDING = 'timestamp desc' diff --git a/gcloud/logging/_helpers.py b/gcloud/logging/_helpers.py new file mode 100644 index 000000000000..aadd2aacdc52 --- /dev/null +++ b/gcloud/logging/_helpers.py @@ -0,0 +1,46 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Helper functions for shared behavior.""" + +import re + +from gcloud._helpers import _name_from_project_path + + +_LOGGER_TEMPLATE = re.compile(r""" + projects/ # static prefix + (?P[^/]+) # initial letter, wordchars + hyphen + /logs/ # static midfix + (?P[^/]+) # initial letter, wordchars + allowed punc +""", re.VERBOSE) + + +def logger_name_from_path(path, project): + """Validate a logger URI path and get the logger name. + + :type path: string + :param path: URI path for a logger API request. + + :type project: string + :param project: The project associated with the request. It is + included for validation purposes. + + :rtype: string + :returns: Topic name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + return _name_from_project_path(path, project, _LOGGER_TEMPLATE) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 4499fcdf9c4e..7f053eeeb775 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -17,6 +17,8 @@ from gcloud.client import JSONClient from gcloud.logging.connection import Connection +from gcloud.logging.entries import StructEntry +from gcloud.logging.entries import TextEntry from gcloud.logging.logger import Logger @@ -53,3 +55,82 @@ def logger(self, name): :returns: Logger created with the current client. """ return Logger(name, client=self) + + def _entry_from_resource(self, resource, loggers): + """Detect correct entry type from resource and instantiate. + + :type resource: dict + :param resource: one entry resource from API response + + :type loggers: dict or None + :param loggers: A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. + + :rtype; One of: + :class:`gcloud.logging.entries.TextEntry`, + :class:`gcloud.logging.entries.StructEntry`, + :returns: the entry instance, constructed via the resource + """ + if 'textPayload' in resource: + return TextEntry.from_api_repr(resource, self, loggers) + elif 'jsonPayload' in resource: + return StructEntry.from_api_repr(resource, self, loggers) + raise ValueError('Cannot parse job resource') + + def list_entries(self, projects=None, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: string + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: string + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a + "next page token" string: if not None, indicates that + more topics can be retrieved with another call (pass that + value as ``page_token``). 
+ """ + if projects is None: + projects = [self.project] + + params = {'projectIds': projects} + + if filter_ is not None: + params['filter'] = filter_ + + if order_by is not None: + params['orderBy'] = order_by + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + resp = self.connection.api_request(method='POST', path='/entries:list', + data=params) + loggers = {} + entries = [self._entry_from_resource(resource, loggers) + for resource in resp.get('entries', ())] + return entries, resp.get('nextPageToken') diff --git a/gcloud/logging/entries.py b/gcloud/logging/entries.py new file mode 100644 index 000000000000..badeea2bb81b --- /dev/null +++ b/gcloud/logging/entries.py @@ -0,0 +1,93 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Log entries within the Google Cloud Logging API.""" + +from gcloud._helpers import _rfc3339_nanos_to_datetime +from gcloud.logging._helpers import logger_name_from_path + + +class _BaseEntry(object): + """Base class for TextEntry, StructEntry. + + :type payload: text or dict + :param payload: The payload passed as ``textPayload``, ``jsonPayload``, + or ``protoPayload``. + + :type logger: :class:`gcloud.logging.logger.Logger` + :param logger: the logger used to write the entry. + + :type insert_id: text, or :class:`NoneType` + :param insert_id: (optional) the ID used to identify an entry uniquely. + + :type timestamp: :class:`datetime.datetime`, or :class:`NoneType` + :param timestamp: (optional) timestamp for the entry + """ + def __init__(self, payload, logger, insert_id=None, timestamp=None): + self.payload = payload + self.logger = logger + self.insert_id = insert_id + self.timestamp = timestamp + + @classmethod + def from_api_repr(cls, resource, client, loggers=None): + """Factory: construct an entry given its API representation + + :type resource: dict + :param resource: text entry resource representation returned from + the API + + :type client: :class:`gcloud.logging.client.Client` + :param client: Client which holds credentials and project + configuration. + + :type loggers: dict or None + :param loggers: A mapping of logger fullnames -> loggers. If not + passed, the entry will have a newly-created logger. + + :rtype: :class:`gcloud.logging.entries.TextEntry` + :returns: Text entry parsed from ``resource``. 
+ """ + if loggers is None: + loggers = {} + logger_fullname = resource['logName'] + logger = loggers.get(logger_fullname) + if logger is None: + logger_name = logger_name_from_path( + logger_fullname, client.project) + logger = loggers[logger_fullname] = client.logger(logger_name) + payload = resource[cls._PAYLOAD_KEY] + insert_id = resource.get('insertId') + timestamp = resource.get('timestamp') + if timestamp is not None: + timestamp = _rfc3339_nanos_to_datetime(timestamp) + return cls(payload, logger, insert_id, timestamp) + + +class TextEntry(_BaseEntry): + """Entry created via a write request with ``textPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'textPayload' + + +class StructEntry(_BaseEntry): + """Entry created via a write request with ``jsonPayload``. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'jsonPayload' diff --git a/gcloud/logging/test__helpers.py b/gcloud/logging/test__helpers.py new file mode 100644 index 000000000000..a70d40218186 --- /dev/null +++ b/gcloud/logging/test__helpers.py @@ -0,0 +1,36 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest2 + + +class Test_logger_name_from_path(unittest2.TestCase): + + def _callFUT(self, path, project): + from gcloud.logging._helpers import logger_name_from_path + return logger_name_from_path(path, project) + + def test_w_simple_name(self): + LOGGER_NAME = 'LOGGER_NAME' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + logger_name = self._callFUT(PATH, PROJECT) + self.assertEqual(logger_name, LOGGER_NAME) + + def test_w_name_w_all_extras(self): + LOGGER_NAME = 'LOGGER_NAME-part.one~part.two%part-three' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/logs/%s' % (PROJECT, LOGGER_NAME) + logger_name = self._callFUT(PATH, PROJECT) + self.assertEqual(logger_name, LOGGER_NAME) diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index 707b3a381209..11a6935fd1b2 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -34,12 +34,123 @@ def test_ctor(self): def test_logger(self): creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + logger = client.logger(self.LOGGER_NAME) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + def test__entry_from_resource_unknown_type(self): + PROJECT = 'PROJECT' + creds = _Credentials() + client = self._makeOne(PROJECT, creds) + loggers = {} + with self.assertRaises(ValueError): + client._entry_from_resource({'unknownPayload': {}}, loggers) + + def test_list_entries_defaults(self): + from datetime import datetime + from gcloud._helpers import UTC + from gcloud.logging.entries import TextEntry + from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + IID1 = 'IID1' + TEXT = 'TEXT' + SENT = { + 'projectIds': [self.PROJECT], + } + TOKEN = 'TOKEN' + RETURNED = { + 'entries': [{ + 'textPayload': TEXT, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + 'nextPageToken': TOKEN, + } + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + conn = client.connection = _Connection(RETURNED) + entries, token = client.list_entries() + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertTrue(isinstance(entry, TextEntry)) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, TEXT) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(token, TOKEN) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:list') + self.assertEqual(req['data'], SENT) - client_obj = self._makeOne(project=self.PROJECT, credentials=creds) - logger = client_obj.logger(self.LOGGER_NAME) + def test_list_entries_explicit(self): + from datetime import datetime + from gcloud._helpers import UTC + from gcloud.logging import DESCENDING + from gcloud.logging.entries import StructEntry + from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + FILTER = 'logName:LOGNAME' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) 
+ IID1 = 'IID1' + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + SENT = { + 'projectIds': [PROJECT1, PROJECT2], + 'filter': FILTER, + 'orderBy': DESCENDING, + 'pageSize': PAGE_SIZE, + 'pageToken': TOKEN, + } + RETURNED = { + 'entries': [{ + 'jsonPayload': PAYLOAD, + 'insertId': IID1, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + }], + } + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + conn = client.connection = _Connection(RETURNED) + entries, token = client.list_entries( + projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, + page_size=PAGE_SIZE, page_token=TOKEN) + self.assertEqual(len(entries), 1) + entry = entries[0] + self.assertTrue(isinstance(entry, StructEntry)) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertTrue(logger.client is client_obj) + self.assertTrue(logger.client is client) self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(token, None) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:list') + self.assertEqual(req['data'], SENT) class _Credentials(object): @@ -53,3 +164,15 @@ def create_scoped_required(): def create_scoped(self, scope): self._scopes = scope return self + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + self._requested.append(kw) + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py new file mode 100644 index 000000000000..40815ba6ff7c --- /dev/null +++ b/gcloud/logging/test_entries.py @@ -0,0 +1,243 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
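These tests rely on the ``_Connection`` and ``_Credentials`` stubs defined in ``test_client.py`` above rather than real API traffic: canned responses go in, and the recorded request keywords come back out for assertions. A minimal sketch of that pattern (the names refer to the test doubles above, not to public API)::

   >>> from gcloud.logging.client import Client
   >>> client = Client(project='PROJECT', credentials=_Credentials())
   >>> conn = client.connection = _Connection({'entries': [],
   ...                                         'nextPageToken': 'TOKEN'})
   >>> entries, token = client.list_entries()
   >>> entries, token
   ([], 'TOKEN')
   >>> conn._requested[0]['method'], conn._requested[0]['path']
   ('POST', '/entries:list')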
+ +import unittest2 + + +class TestTextEntry(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from gcloud.logging.entries import TextEntry + return TextEntry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + PAYLOAD = 'PAYLOAD' + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.logger is logger) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + + def test_ctor_explicit(self): + import datetime + PAYLOAD = 'PAYLOAD' + IID = 'IID' + TIMESTAMP = datetime.datetime.now() + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger, IID, TIMESTAMP) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.logger is logger) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, TIMESTAMP) + + def test_from_api_repr_missing_data_no_loggers(self): + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'textPayload': PAYLOAD, + 'logName': LOG_NAME, + } + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + logger = entry.logger + self.assertTrue(isinstance(logger, _Logger)) + self.assertTrue(logger.client is client) + self.assertEqual(logger.name, self.LOGGER_NAME) + + def test_from_api_repr_w_loggers_no_logger_match(self): + from datetime import datetime + from gcloud._helpers import UTC + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'textPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + } + loggers = {} + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger + self.assertTrue(isinstance(logger, _Logger)) + self.assertTrue(logger.client is client) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertEqual(loggers, {LOG_NAME: logger}) + + def test_from_api_repr_w_loggers_w_logger_match(self): + from datetime import datetime + from gcloud._helpers import UTC + client = _Client(self.PROJECT) + PAYLOAD = 'PAYLOAD' + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'textPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + } + LOGGER = object() + loggers = {LOG_NAME: LOGGER} + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertTrue(entry.logger is LOGGER) + + +class TestStructEntry(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from gcloud.logging.entries import StructEntry + return 
StructEntry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.logger is logger) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + + def test_ctor_explicit(self): + import datetime + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + IID = 'IID' + TIMESTAMP = datetime.datetime.now() + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger, IID, TIMESTAMP) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.logger is logger) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, TIMESTAMP) + + def test_from_api_repr_missing_data_no_loggers(self): + client = _Client(self.PROJECT) + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'jsonPayload': PAYLOAD, + 'logName': LOG_NAME, + } + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + logger = entry.logger + self.assertTrue(isinstance(logger, _Logger)) + self.assertTrue(logger.client is client) + self.assertEqual(logger.name, self.LOGGER_NAME) + + def test_from_api_repr_w_loggers_no_logger_match(self): + from datetime import datetime + from gcloud._helpers import UTC + client = _Client(self.PROJECT) + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'jsonPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + } + loggers = {} + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger + self.assertTrue(isinstance(logger, _Logger)) + self.assertTrue(logger.client is client) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertEqual(loggers, {LOG_NAME: logger}) + + def test_from_api_repr_w_loggers_w_logger_match(self): + from datetime import datetime + from gcloud._helpers import UTC + client = _Client(self.PROJECT) + PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'jsonPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + } + LOGGER = object() + loggers = {LOG_NAME: LOGGER} + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertTrue(entry.logger is LOGGER) + + +def _datetime_to_rfc3339_w_nanos(value): + from gcloud._helpers import _RFC3339_NO_FRACTION + no_fraction = value.strftime(_RFC3339_NO_FRACTION) + return '%s.%09dZ' % (no_fraction, 
value.microsecond * 1000) + + +class _Logger(object): + + def __init__(self, name, client): + self.name = name + self.client = client + + +class _Client(object): + + def __init__(self, project): + self.project = project + + def logger(self, name): + return _Logger(name, self) diff --git a/gcloud/test__helpers.py b/gcloud/test__helpers.py index 4f2cb849c89d..00aa5075c731 100644 --- a/gcloud/test__helpers.py +++ b/gcloud/test__helpers.py @@ -411,7 +411,21 @@ def _callFUT(self, dt_str): from gcloud._helpers import _rfc3339_to_datetime return _rfc3339_to_datetime(dt_str) - def test_it(self): + def test_w_bogus_zone(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dBOGUS' % ( + year, month, day, hour, minute, seconds, micros) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + def test_w_microseconds(self): import datetime from gcloud._helpers import UTC @@ -430,6 +444,76 @@ def test_it(self): year, month, day, hour, minute, seconds, micros, UTC) self.assertEqual(result, expected_result) + def test_w_naonseconds(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + nanos = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%09dZ' % ( + year, month, day, hour, minute, seconds, nanos) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + +class Test__rfc3339_nanos_to_datetime(unittest2.TestCase): + + def _callFUT(self, dt_str): + from gcloud._helpers import _rfc3339_nanos_to_datetime + return _rfc3339_nanos_to_datetime(dt_str) + + def test_w_bogus_zone(self): + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456789 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dBOGUS' % ( + year, month, day, hour, minute, seconds, micros) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + def test_w_microseconds(self): + + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + micros = 123456 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dZ' % ( + year, month, day, hour, minute, seconds, micros) + with self.assertRaises(ValueError): + self._callFUT(dt_str) + + def test_w_naonseconds(self): + import datetime + from gcloud._helpers import UTC + + year = 2009 + month = 12 + day = 17 + hour = 12 + minute = 44 + seconds = 32 + nanos = 123456789 + micros = nanos // 1000 + + dt_str = '%d-%02d-%02dT%02d:%02d:%02d.%06dZ' % ( + year, month, day, hour, minute, seconds, nanos) + result = self._callFUT(dt_str) + expected_result = datetime.datetime( + year, month, day, hour, minute, seconds, micros, UTC) + self.assertEqual(result, expected_result) + class Test__datetime_to_rfc3339(unittest2.TestCase): From e4b4b0d2d1a6bfb9348ff2f073a9577cdf03c378 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 9 Mar 2016 16:17:14 -0500 Subject: [PATCH 14/68] Add 'Logger.list_entries'. --- docs/logging-usage.rst | 10 ++++++- gcloud/logging/logger.py | 43 ++++++++++++++++++++++++++++++ gcloud/logging/test_logger.py | 49 +++++++++++++++++++++++++++++++++++ 3 files changed, 101 insertions(+), 1 deletion(-) diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst index 3f027c00d38d..6711bf207a0a 100644 --- a/docs/logging-usage.rst +++ b/docs/logging-usage.rst @@ -98,7 +98,7 @@ Sort entries in descending timestamp order. >>> client = logging.Client() >>> entries, token = client.list_entries(order_by=logging.DESCENDING) # API call -Retrieve entities in batches of 10, iterating until done. 
+Retrieve entries in batches of 10, iterating until done. .. doctest:: @@ -112,6 +112,14 @@ Retrieve entities in batches of 10, iterating until done. ... if token is None: ... break +Retrieve entries for a single logger, sorting in descending timestamp order: + +.. doctest:: + + >>> from gcloud import logging + >>> client = logging.Client() + >>> logger = client.logger('log_name') + >>> entries, token = logger.list_entries(order_by=logging.DESCENDING) # API call Delete all entries for a logger ------------------------------- diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index ceeb356ede73..f071802bd5c9 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -128,3 +128,46 @@ def delete(self, client=None): client = self._require_client(client) client.connection.api_request( method='DELETE', path='/%s' % self.full_name) + + def list_entries(self, projects=None, filter_=None, order_by=None, + page_size=None, page_token=None): + """Return a page of log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list + + :type projects: list of strings + :param projects: project IDs to include. If not passed, + defaults to the project bound to the client. + + :type filter_: string + :param filter_: a filter expression. See: + https://cloud.google.com/logging/docs/view/advanced_filters + + :type order_by: string + :param order_by: One of :data:`gcloud.logging.ASCENDING` or + :data:`gcloud.logging.DESCENDING`. + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a + "next page token" string: if not None, indicates that + more topics can be retrieved with another call (pass that + value as ``page_token``). 
+ """ + log_filter = 'logName:%s' % (self.name,) + if filter_ is not None: + filter_ = '%s AND %s' % (filter_, log_filter) + else: + filter_ = log_filter + return self.client.list_entries( + projects=projects, filter_=filter_, order_by=order_by, + page_size=page_size, page_token=page_token) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index d5f1c9a56b9a..920233aaeab4 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -152,6 +152,48 @@ def test_delete_w_alternate_client(self): self.assertEqual(req['method'], 'DELETE') self.assertEqual(req['path'], '/%s' % PATH) + def test_list_entries_defaults(self): + LISTED = { + 'projects': None, + 'filter_': 'logName:%s' % (self.LOGGER_NAME), + 'order_by': None, + 'page_size': None, + 'page_token': None, + } + TOKEN = 'TOKEN' + conn = _Connection() + client = _Client(self.PROJECT, conn) + client._token = TOKEN + logger = self._makeOne(self.LOGGER_NAME, client=client) + entries, token = logger.list_entries() + self.assertEqual(len(entries), 0) + self.assertEqual(token, TOKEN) + self.assertEqual(client._listed, LISTED) + + def test_list_entries_explicit(self): + from gcloud.logging import DESCENDING + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + FILTER = 'resource.type:global' + TOKEN = 'TOKEN' + PAGE_SIZE = 42 + LISTED = { + 'projects': ['PROJECT1', 'PROJECT2'], + 'filter_': '%s AND logName:%s' % (FILTER, self.LOGGER_NAME), + 'order_by': DESCENDING, + 'page_size': PAGE_SIZE, + 'page_token': TOKEN, + } + conn = _Connection() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + entries, token = logger.list_entries( + projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, + page_size=PAGE_SIZE, page_token=TOKEN) + self.assertEqual(len(entries), 0) + self.assertEqual(token, None) + self.assertEqual(client._listed, LISTED) + class _Connection(object): @@ -167,6 +209,13 @@ def api_request(self, **kw): class _Client(object): + _listed = _token = None + _entries = () + def __init__(self, project, connection=None): self.project = project self.connection = connection + + def list_entries(self, **kw): + self._listed = kw + return self._entries, self._token From a9c8515a010b53ec933781a166a5ebab7b830e0a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 10 Mar 2016 16:27:12 -0500 Subject: [PATCH 15/68] Add 'Sink.create' API wrapper and 'Client.sink' factory. --- docs/index.rst | 1 + docs/logging-sink.rst | 7 +++ gcloud/logging/client.py | 20 +++++++ gcloud/logging/sink.py | 92 +++++++++++++++++++++++++++++ gcloud/logging/test_client.py | 19 ++++++ gcloud/logging/test_sink.py | 108 ++++++++++++++++++++++++++++++++++ 6 files changed, 247 insertions(+) create mode 100644 docs/logging-sink.rst create mode 100644 gcloud/logging/sink.py create mode 100644 gcloud/logging/test_sink.py diff --git a/docs/index.rst b/docs/index.rst index e2edfb05c7e8..4770b33da657 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -114,6 +114,7 @@ Client logging-logger logging-entries + logging-sink .. toctree:: :maxdepth: 0 diff --git a/docs/logging-sink.rst b/docs/logging-sink.rst new file mode 100644 index 000000000000..bbfb62130f27 --- /dev/null +++ b/docs/logging-sink.rst @@ -0,0 +1,7 @@ +Sinks +===== + +.. 
automodule:: gcloud.logging.sink + :members: + :undoc-members: + :show-inheritance: diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 7f053eeeb775..ba011e831f6d 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -20,6 +20,7 @@ from gcloud.logging.entries import StructEntry from gcloud.logging.entries import TextEntry from gcloud.logging.logger import Logger +from gcloud.logging.sink import Sink class Client(JSONClient): @@ -134,3 +135,22 @@ def list_entries(self, projects=None, filter_=None, order_by=None, entries = [self._entry_from_resource(resource, loggers) for resource in resp.get('entries', ())] return entries, resp.get('nextPageToken') + + def sink(self, name, filter_, destination): + """Creates a sink bound to the current client. + + :type name: string + :param name: the name of the sink to be constructed. + + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by + the sink. + + :rtype: :class:`gcloud.pubsub.sink.Sink` + :returns: Sink created with the current client. + """ + return Sink(name, filter_, destination, client=self) diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py new file mode 100644 index 000000000000..a5c0e614f135 --- /dev/null +++ b/gcloud/logging/sink.py @@ -0,0 +1,92 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define Logging API Sinks.""" + + +class Sink(object): + """Sinks represent filtered exports for log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks + + :type name: string + :param name: the name of the sink + + :type filter_: string + :param filter_: the advanced logs filter expression defining the entries + exported by the sink. + + :type destination: string + :param destination: destination URI for the entries exported by the sink. + + :type client: :class:`gcloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the sink (which requires a project). + """ + def __init__(self, name, filter_, destination, client): + self.name = name + self.filter_ = filter_ + self.destination = destination + self._client = client + + @property + def client(self): + """Clent bound to the sink.""" + return self._client + + @property + def project(self): + """Project bound to the sink.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in sink APIs""" + return 'projects/%s/sinks/%s' % (self.project, self.name) + + @property + def path(self): + """URL path for the sink's APIs""" + return '/%s' % (self.full_name) + + def _require_client(self, client): + """Check client or verify over-ride. + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. 
If not passed, falls back to the + ``client`` stored on the current sink. + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the sink via a PUT request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/create + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + data = { + 'name': self.name, + 'filter': self.filter_, + 'destination': self.destination, + } + client.connection.api_request(method='PUT', path=self.path, data=data) diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index 11a6935fd1b2..a56640cc9e1c 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -19,6 +19,9 @@ class TestClient(unittest2.TestCase): PROJECT = 'PROJECT' LOGGER_NAME = 'LOGGER_NAME' + SINK_NAME = 'SINK_NAME' + FILTER = 'logName:syslog AND severity>=ERROR' + DESTINATION_URI = 'faux.googleapis.com/destination' def _getTargetClass(self): from gcloud.logging.client import Client @@ -33,9 +36,11 @@ def test_ctor(self): self.assertEqual(client.project, self.PROJECT) def test_logger(self): + from gcloud.logging.logger import Logger creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) logger = client.logger(self.LOGGER_NAME) + self.assertTrue(isinstance(logger, Logger)) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertTrue(logger.client is client) self.assertEqual(logger.project, self.PROJECT) @@ -100,6 +105,7 @@ def test_list_entries_explicit(self): from gcloud._helpers import UTC from gcloud.logging import DESCENDING from gcloud.logging.entries import StructEntry + from gcloud.logging.logger import Logger from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos PROJECT1 = 'PROJECT1' PROJECT2 = 'PROJECT2' @@ -142,6 +148,7 @@ def test_list_entries_explicit(self): self.assertEqual(entry.payload, PAYLOAD) self.assertEqual(entry.timestamp, NOW) logger = entry.logger + self.assertTrue(isinstance(logger, Logger)) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertTrue(logger.client is client) self.assertEqual(logger.project, self.PROJECT) @@ -152,6 +159,18 @@ def test_list_entries_explicit(self): self.assertEqual(req['path'], '/entries:list') self.assertEqual(req['data'], SENT) + def test_sink(self): + from gcloud.logging.sink import Sink + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + sink = client.sink(self.SINK_NAME, self.FILTER, self.DESTINATION_URI) + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink.client is client) + self.assertEqual(sink.project, self.PROJECT) + class _Credentials(object): diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py new file mode 100644 index 000000000000..67d99bcf8aa2 --- /dev/null +++ b/gcloud/logging/test_sink.py @@ -0,0 +1,108 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest2 + + +class TestSink(unittest2.TestCase): + + PROJECT = 'test-project' + SINK_NAME = 'sink-name' + FILTER = 'logName:syslog AND severity>=INFO' + DESTINATION_URI = 'faux.googleapis.com/destination' + + def _getTargetClass(self): + from gcloud.logging.sink import Sink + return Sink + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + conn = _Connection() + client = _Client(self.PROJECT, conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink.client is client) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + self.assertEqual(sink.path, '/%s' % (FULL,)) + + def test_create_w_bound_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection({'name': FULL}) + client = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client) + sink.create() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + def test_create_w_alternate_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn1 = _Connection({'name': FULL}) + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({'name': FULL}) + client2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=client1) + sink.create(client=client2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: # pragma: NO COVER + raise NotFound('miss') + else: + return response + + +class _Client(object): + + def __init__(self, project, connection=None): + self.project = project + self.connection = connection From 47cf5aba62e43eea778b6a4c41b738504cb30d3e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 10 Mar 2016 16:35:23 -0500 Subject: [PATCH 16/68] Add 'Sink.exists' API wrapper. 
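A minimal usage sketch of the ``exists()`` wrapper added by this patch: it issues a ``GET`` against the sink's path and maps ``NotFound`` to ``False``. The filter and destination below are placeholders borrowed from the test constants::

   >>> from gcloud import logging
   >>> client = logging.Client()
   >>> sink = client.sink(
   ...     "robots-sink",
   ...     filter_='logName:syslog AND severity>=ERROR',
   ...     destination='faux.googleapis.com/destination')
   >>> sink.exists()   # API call: GET; NotFound is reported as False
   False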
--- gcloud/logging/sink.py | 21 +++++++++++++++++++++ gcloud/logging/test_sink.py | 27 +++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py index a5c0e614f135..16a44929404e 100644 --- a/gcloud/logging/sink.py +++ b/gcloud/logging/sink.py @@ -14,6 +14,8 @@ """Define Logging API Sinks.""" +from gcloud.exceptions import NotFound + class Sink(object): """Sinks represent filtered exports for log entries. @@ -90,3 +92,22 @@ def create(self, client=None): 'destination': self.destination, } client.connection.api_request(method='PUT', path=self.path, data=data) + + def exists(self, client=None): + """API call: test for the existence of the sink via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + + try: + client.connection.api_request(method='GET', path=self.path) + except NotFound: + return False + else: + return True diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py index 67d99bcf8aa2..9e530396f178 100644 --- a/gcloud/logging/test_sink.py +++ b/gcloud/logging/test_sink.py @@ -82,6 +82,33 @@ def test_create_w_alternate_client(self): self.assertEqual(req['path'], '/%s' % FULL) self.assertEqual(req['data'], RESOURCE) + def test_exists_miss_w_bound_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + conn = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT) + self.assertFalse(sink.exists()) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_exists_hit_w_alternate_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + conn1 = _Connection({'name': FULL}) + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({'name': FULL}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT1) + self.assertTrue(sink.exists(client=CLIENT2)) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + class _Connection(object): From 629418bb02778d08d7cbba98bbcc8374cf8f39a7 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 10 Mar 2016 16:44:08 -0500 Subject: [PATCH 17/68] Add 'Sink.reload' API wrapper. 
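A minimal usage sketch of the ``reload()`` wrapper added by this patch, assuming the sink already exists server-side and using placeholder values: it re-fetches the sink resource with a ``GET`` and overwrites the local ``filter_`` and ``destination`` with whatever the service currently stores::

   >>> from gcloud import logging
   >>> client = logging.Client()
   >>> sink = client.sink(
   ...     "robots-sink",
   ...     filter_='logName:stale',
   ...     destination='faux.googleapis.com/destination')
   >>> sink.reload()   # API call: GET, then resync local configuration

After the call, ``sink.filter_`` and ``sink.destination`` mirror the stored configuration, discarding any stale local values.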
--- gcloud/logging/sink.py | 15 ++++++++++++ gcloud/logging/test_sink.py | 49 +++++++++++++++++++++++++++++++++++-- 2 files changed, 62 insertions(+), 2 deletions(-) diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py index 16a44929404e..c817bc52ff7e 100644 --- a/gcloud/logging/sink.py +++ b/gcloud/logging/sink.py @@ -111,3 +111,18 @@ def exists(self, client=None): return False else: return True + + def reload(self, client=None): + """API call: sync local sink configuration via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + data = client.connection.api_request(method='GET', path=self.path) + self.filter_ = data['filter'] + self.destination = data['destination'] diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py index 9e530396f178..0869789a543a 100644 --- a/gcloud/logging/test_sink.py +++ b/gcloud/logging/test_sink.py @@ -68,7 +68,7 @@ def test_create_w_alternate_client(self): 'filter': self.FILTER, 'destination': self.DESTINATION_URI, } - conn1 = _Connection({'name': FULL}) + conn1 = _Connection() client1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection({'name': FULL}) client2 = _Client(project=self.PROJECT, connection=conn2) @@ -96,7 +96,7 @@ def test_exists_miss_w_bound_client(self): def test_exists_hit_w_alternate_client(self): FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) - conn1 = _Connection({'name': FULL}) + conn1 = _Connection() CLIENT1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection({'name': FULL}) CLIENT2 = _Client(project=self.PROJECT, connection=conn2) @@ -109,6 +109,51 @@ def test_exists_hit_w_alternate_client(self): self.assertEqual(req['method'], 'GET') self.assertEqual(req['path'], '/%s' % FULL) + def test_reload_w_bound_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_DESTINATION_URI = 'faux.googleapis.com/other' + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': NEW_FILTER, + 'destination': NEW_DESTINATION_URI, + } + conn = _Connection(RESOURCE) + CLIENT = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT) + sink.reload() + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.destination, NEW_DESTINATION_URI) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_reload_w_alternate_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + NEW_FILTER = 'logName:syslog AND severity>=INFO' + NEW_DESTINATION_URI = 'faux.googleapis.com/other' + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': NEW_FILTER, + 'destination': NEW_DESTINATION_URI, + } + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT1) + sink.reload(client=CLIENT2) + self.assertEqual(sink.filter_, NEW_FILTER) + self.assertEqual(sink.destination, NEW_DESTINATION_URI) + 
self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + class _Connection(object): From 014b619a9dfab0fe238f90e3b82a14fd029f91e8 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 7 Mar 2016 17:48:29 -0500 Subject: [PATCH 18/68] Add 'metric.Metric' class and its 'create' API wrapper. --- docs/index.rst | 1 + docs/logging-metric.rst | 7 ++ gcloud/logging/metric.py | 95 +++++++++++++++++++++++++++ gcloud/logging/test_metric.py | 119 ++++++++++++++++++++++++++++++++++ 4 files changed, 222 insertions(+) create mode 100644 docs/logging-metric.rst create mode 100644 gcloud/logging/metric.py create mode 100644 gcloud/logging/test_metric.py diff --git a/docs/index.rst b/docs/index.rst index 4770b33da657..52fefe8468bf 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -114,6 +114,7 @@ Client logging-logger logging-entries + logging-metric logging-sink .. toctree:: diff --git a/docs/logging-metric.rst b/docs/logging-metric.rst new file mode 100644 index 000000000000..343634e8307d --- /dev/null +++ b/docs/logging-metric.rst @@ -0,0 +1,7 @@ +Metrics +======= + +.. automodule:: gcloud.logging.metric + :members: + :undoc-members: + :show-inheritance: diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py new file mode 100644 index 000000000000..8129700fce2f --- /dev/null +++ b/gcloud/logging/metric.py @@ -0,0 +1,95 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define Logging API Metrics.""" + + +class Metric(object): + """Metrics represent named filters for log entries. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics + + :type name: string + :param name: the name of the metric + + :type filter_: string + :param filter_: the advanced logs filter expression defining the entries + tracked by the metric. + + :type client: :class:`gcloud.logging.client.Client` + :param client: A client which holds credentials and project configuration + for the metric (which requires a project). + + :type description: string + :param description: an optional description of the metric + """ + def __init__(self, name, filter_, client, description=''): + self.name = name + self._client = client + self.filter_ = filter_ + self.description = description + + @property + def client(self): + """Clent bound to the logger.""" + return self._client + + @property + def project(self): + """Project bound to the logger.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in metric APIs""" + return 'projects/%s/metrics/%s' % (self.project, self.name) + + @property + def path(self): + """URL path for the metric's APIs""" + return '/%s' % (self.full_name) + + def _require_client(self, client): + """Check client or verify over-ride. 
+ + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + + :rtype: :class:`gcloud.logging.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the metric via a PUT request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/create + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + data = { + 'name': self.name, + 'filter': self.filter_, + } + if self.description: + data['description'] = self.description + client.connection.api_request(method='PUT', path=self.path, data=data) diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py new file mode 100644 index 000000000000..453e60916aae --- /dev/null +++ b/gcloud/logging/test_metric.py @@ -0,0 +1,119 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
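A minimal sketch of using the ``Metric`` class added above; the client-side ``metric()`` factory is not part of this patch, so the sketch constructs ``Metric`` directly with a placeholder filter. ``create()`` issues a ``PUT`` and includes ``description`` in the request body only when it is non-empty::

   >>> from gcloud import logging
   >>> from gcloud.logging.metric import Metric
   >>> client = logging.Client()
   >>> metric = Metric("robots", "logName:syslog AND severity>=ERROR",
   ...                 client=client, description="Robot traffic")
   >>> metric.create()   # API call: PUT to projects/<project>/metrics/robots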
+ +import unittest2 + + +class TestMetric(unittest2.TestCase): + + PROJECT = 'test-project' + METRIC_NAME = 'metric-name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 'DESCRIPTION' + + def _getTargetClass(self): + from gcloud.logging.metric import Metric + return Metric + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn = _Connection() + client = _Client(self.PROJECT, conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, '') + self.assertTrue(metric.client is client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.path, '/%s' % (FULL,)) + + def test_ctor_explicit(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn = _Connection() + client = _Client(self.PROJECT, conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, + client=client, description=self.DESCRIPTION) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertTrue(metric.client is client) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + self.assertEqual(metric.path, '/%s' % (FULL,)) + + def test_create_w_bound_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + conn = _Connection({'name': FULL}) + client = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) + metric.create() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + def test_create_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, + } + conn1 = _Connection({'name': FULL}) + client1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({'name': FULL}) + client2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, + description=self.DESCRIPTION) + metric.create(client=client2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + +class _Connection(object): + + def __init__(self, *responses): + self._responses = responses + self._requested = [] + + def api_request(self, **kw): + from gcloud.exceptions import NotFound + self._requested.append(kw) + + try: + response, self._responses = self._responses[0], self._responses[1:] + except: # pragma: NO COVER + raise NotFound('miss') + else: + return response + + +class _Client(object): + + def __init__(self, project, connection=None): + self.project = project + self.connection = connection From dd1d44add85df8324b982ecf535fcf32911e5430 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 10 Mar 2016 17:01:53 -0500 
Subject: [PATCH 19/68] Add 'Sink.update' API wrapper. --- gcloud/logging/sink.py | 18 +++++++++++++++++ gcloud/logging/test_sink.py | 39 +++++++++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+) diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py index c817bc52ff7e..8b9d022f08e2 100644 --- a/gcloud/logging/sink.py +++ b/gcloud/logging/sink.py @@ -126,3 +126,21 @@ def reload(self, client=None): data = client.connection.api_request(method='GET', path=self.path) self.filter_ = data['filter'] self.destination = data['destination'] + + def update(self, client=None): + """API call: update sink configuration via a PUT request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/update + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current sink. + """ + client = self._require_client(client) + data = { + 'name': self.name, + 'filter': self.filter_, + 'destination': self.destination, + } + client.connection.api_request(method='PUT', path=self.path, data=data) diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py index 0869789a543a..7da704e96624 100644 --- a/gcloud/logging/test_sink.py +++ b/gcloud/logging/test_sink.py @@ -154,6 +154,45 @@ def test_reload_w_alternate_client(self): self.assertEqual(req['method'], 'GET') self.assertEqual(req['path'], '/%s' % FULL) + def test_update_w_bound_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn = _Connection(RESOURCE) + CLIENT = _Client(project=self.PROJECT, connection=conn) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT) + sink.update() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + def test_update_w_alternate_client(self): + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': self.SINK_NAME, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, + client=CLIENT1) + sink.update(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + class _Connection(object): From 6407219a33d5444c35ed6cd390dbb73ae72c6b30 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 10 Mar 2016 17:07:58 -0500 Subject: [PATCH 20/68] Add 'Sink.delete' API wrapper. 
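A minimal usage sketch of the ``delete()`` wrapper added by this patch, assuming the sink exists and using placeholder values: a single ``DELETE`` against the sink's path, after which ``exists()`` reports ``False``::

   >>> from gcloud import logging
   >>> client = logging.Client()
   >>> sink = client.sink(
   ...     "robots-sink",
   ...     filter_='logName:syslog AND severity>=ERROR',
   ...     destination='faux.googleapis.com/destination')
   >>> sink.delete()   # API call: DELETE on /projects/<project>/sinks/robots-sink
   >>> sink.exists()   # API call
   False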
---
 gcloud/logging/sink.py      | 13 +++++++++++++
 gcloud/logging/test_sink.py | 27 +++++++++++++++++++++++++++
 2 files changed, 40 insertions(+)

diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py
index 8b9d022f08e2..4a2d19699bc4 100644
--- a/gcloud/logging/sink.py
+++ b/gcloud/logging/sink.py
@@ -144,3 +144,16 @@ def update(self, client=None):
             'destination': self.destination,
         }
         client.connection.api_request(method='PUT', path=self.path, data=data)
+
+    def delete(self, client=None):
+        """API call: delete a sink via a DELETE request
+
+        See
+        https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/delete
+
+        :type client: :class:`gcloud.logging.client.Client` or ``NoneType``
+        :param client: the client to use.  If not passed, falls back to the
+                       ``client`` stored on the current sink.
+        """
+        client = self._require_client(client)
+        client.connection.api_request(method='DELETE', path=self.path)
diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py
index 7da704e96624..69c3a4e6adaf 100644
--- a/gcloud/logging/test_sink.py
+++ b/gcloud/logging/test_sink.py
@@ -193,6 +193,33 @@ def test_update_w_alternate_client(self):
         self.assertEqual(req['path'], '/%s' % FULL)
         self.assertEqual(req['data'], RESOURCE)
 
+    def test_delete_w_bound_client(self):
+        FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
+        conn = _Connection({})
+        CLIENT = _Client(project=self.PROJECT, connection=conn)
+        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
+                             client=CLIENT)
+        sink.delete()
+        self.assertEqual(len(conn._requested), 1)
+        req = conn._requested[0]
+        self.assertEqual(req['method'], 'DELETE')
+        self.assertEqual(req['path'], '/%s' % FULL)
+
+    def test_delete_w_alternate_client(self):
+        FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME)
+        conn1 = _Connection()
+        CLIENT1 = _Client(project=self.PROJECT, connection=conn1)
+        conn2 = _Connection({})
+        CLIENT2 = _Client(project=self.PROJECT, connection=conn2)
+        sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI,
+                             client=CLIENT1)
+        sink.delete(client=CLIENT2)
+        self.assertEqual(len(conn1._requested), 0)
+        self.assertEqual(len(conn2._requested), 1)
+        req = conn2._requested[0]
+        self.assertEqual(req['method'], 'DELETE')
+        self.assertEqual(req['path'], '/%s' % FULL)
 
 
 class _Connection(object):

From 81cd8507295a7d8811cb751ae97c4ee75d44b31a Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Fri, 11 Mar 2016 13:35:16 -0500
Subject: [PATCH 21/68] Add 'Sink.from_api_repr'.

---
 gcloud/logging/sink.py      | 51 +++++++++++++++++++++
 gcloud/logging/test_sink.py | 80 +++++++++++++++++++++++++++++++
 2 files changed, 131 insertions(+)

diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py
index 4a2d19699bc4..983861e61d3a 100644
--- a/gcloud/logging/sink.py
+++ b/gcloud/logging/sink.py
@@ -14,9 +14,36 @@
 
 """Define Logging API Sinks."""
 
+import re
+
+from gcloud._helpers import _name_from_project_path
 from gcloud.exceptions import NotFound
 
 
+_SINK_TEMPLATE = re.compile(r"""
+    projects/            # static prefix
+    (?P<project>[^/]+)   # initial letter, wordchars + hyphen
+    /sinks/              # static midfix
+    (?P<name>[^/]+)      # initial letter, wordchars + allowed punc
+""", re.VERBOSE)
+
+
+def _sink_name_from_path(path, project):
+    """Validate a sink URI path and get the sink name.
+    :type path: string
+    :param path: URI path for a sink API request.
+    :type project: string
+    :param project: The project associated with the request.  It is
+                    included for validation purposes.
+ :rtype: string + :returns: Metric name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + return _name_from_project_path(path, project, _SINK_TEMPLATE) + + class Sink(object): """Sinks represent filtered exports for log entries. @@ -63,11 +90,35 @@ def path(self): """URL path for the sink's APIs""" return '/%s' % (self.full_name) + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a sink given its API representation + + :type resource: dict + :param resource: sink resource representation returned from the API + + :type client: :class:`gcloud.pubsub.client.Client` + :param client: Client which holds credentials and project + configuration for the sink. + + :rtype: :class:`gcloud.logging.sink.Sink` + :returns: Sink parsed from ``resource``. + :raises: :class:`ValueError` if ``client`` is not ``None`` and the + project from the resource does not agree with the project + from the client. + """ + sink_name = _sink_name_from_path(resource['name'], client.project) + filter_ = resource['filter'] + destination = resource['destination'] + return cls(sink_name, filter_, destination, client=client) + def _require_client(self, client): """Check client or verify over-ride. + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current sink. + :rtype: :class:`gcloud.logging.client.Client` :returns: The client passed in or the currently bound client. """ diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py index 69c3a4e6adaf..e78860a772a3 100644 --- a/gcloud/logging/test_sink.py +++ b/gcloud/logging/test_sink.py @@ -15,6 +15,38 @@ import unittest2 +class Test__sink_name_from_path(unittest2.TestCase): + + def _callFUT(self, path, project): + from gcloud.logging.sink import _sink_name_from_path + return _sink_name_from_path(path, project) + + def test_invalid_path_length(self): + PATH = 'projects/foo' + PROJECT = None + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT) + + def test_invalid_path_format(self): + SINK_NAME = 'SINK_NAME' + PROJECT = 'PROJECT' + PATH = 'foo/%s/bar/%s' % (PROJECT, SINK_NAME) + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT) + + def test_invalid_project(self): + SINK_NAME = 'SINK_NAME' + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + PATH = 'projects/%s/sinks/%s' % (PROJECT1, SINK_NAME) + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT2) + + def test_valid_data(self): + SINK_NAME = 'SINK_NAME' + PROJECT = 'PROJECT' + PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + sink_name = self._callFUT(PATH, PROJECT) + self.assertEqual(sink_name, SINK_NAME) + + class TestSink(unittest2.TestCase): PROJECT = 'test-project' @@ -43,6 +75,54 @@ def test_ctor(self): self.assertEqual(sink.full_name, FULL) self.assertEqual(sink.path, '/%s' % (FULL,)) + def test_from_api_repr_minimal(self): + CLIENT = _Client(project=self.PROJECT) + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': FULL, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + sink = klass.from_api_repr(RESOURCE, client=CLIENT) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink._client is CLIENT) + 
self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + + def test_from_api_repr_w_description(self): + CLIENT = _Client(project=self.PROJECT) + FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + RESOURCE = { + 'name': FULL, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + sink = klass.from_api_repr(RESOURCE, client=CLIENT) + self.assertEqual(sink.name, self.SINK_NAME) + self.assertEqual(sink.filter_, self.FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertTrue(sink._client is CLIENT) + self.assertEqual(sink.project, self.PROJECT) + self.assertEqual(sink.full_name, FULL) + + def test_from_api_repr_with_mismatched_project(self): + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + CLIENT = _Client(project=PROJECT1) + FULL = 'projects/%s/sinks/%s' % (PROJECT2, self.SINK_NAME) + RESOURCE = { + 'name': FULL, + 'filter': self.FILTER, + 'destination': self.DESTINATION_URI, + } + klass = self._getTargetClass() + self.assertRaises(ValueError, klass.from_api_repr, + RESOURCE, client=CLIENT) + def test_create_w_bound_client(self): FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) RESOURCE = { From b63d62df14884ace3bdd31eefd0664ff776b94dc Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 11 Mar 2016 13:42:57 -0500 Subject: [PATCH 22/68] Add 'Client.list_sinks' API wrapper. --- gcloud/logging/client.py | 36 +++++++++++++ gcloud/logging/test_client.py | 99 +++++++++++++++++++++++++++++++++++ 2 files changed, 135 insertions(+) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 4f87fdf6cf71..1713d8d47538 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -154,3 +154,39 @@ def sink(self, name, filter_, destination): :returns: Sink created with the current client. """ return Sink(name, filter_, destination, client=self) + + def list_sinks(self, page_size=None, page_token=None): + """List sinks for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list + + :type page_size: int + :param page_size: maximum number of sinks to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of sinks. If not + passed, the API will return the first page of + sinks. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.sink.Sink`, plus a + "next page token" string: if not None, indicates that + more sinks can be retrieved with another call (pass that + value as ``page_token``). 
+ """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/sinks' % (self.project,) + resp = self.connection.api_request(method='GET', path=path, + query_params=params) + sinks = [Sink.from_api_repr(resource, self) + for resource in resp.get('sinks', ())] + return sinks, resp.get('nextPageToken') diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index a56640cc9e1c..94a9997e978e 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -171,6 +171,105 @@ def test_sink(self): self.assertTrue(sink.client is client) self.assertEqual(sink.project, self.PROJECT) + def test_list_sinks_no_paging(self): + from gcloud.logging.sink import Sink + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + SINK_NAME = 'sink_name' + FILTER = 'logName:syslog AND severity>=ERROR' + SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + + RETURNED = { + 'sinks': [{ + 'name': SINK_PATH, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + }], + } + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + sinks, next_page_token = CLIENT_OBJ.list_sinks() + # Test values are correct. + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, SINK_NAME) + self.assertEqual(sink.filter_, FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(next_page_token, None) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/sinks' % (PROJECT,)) + self.assertEqual(req['query_params'], {}) + + def test_list_sinks_with_paging(self): + from gcloud.logging.sink import Sink + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + SINK_NAME = 'sink_name' + FILTER = 'logName:syslog AND severity>=ERROR' + SINK_PATH = 'projects/%s/sinks/%s' % (PROJECT, SINK_NAME) + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + RETURNED = { + 'sinks': [{ + 'name': SINK_PATH, + 'filter': FILTER, + 'destination': self.DESTINATION_URI, + }], + 'nextPageToken': TOKEN2, + } + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + sinks, next_page_token = CLIENT_OBJ.list_sinks(SIZE, TOKEN1) + # Test values are correct. + self.assertEqual(len(sinks), 1) + sink = sinks[0] + self.assertTrue(isinstance(sink, Sink)) + self.assertEqual(sink.name, SINK_NAME) + self.assertEqual(sink.filter_, FILTER) + self.assertEqual(sink.destination, self.DESTINATION_URI) + self.assertEqual(next_page_token, TOKEN2) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/sinks' % (PROJECT,)) + self.assertEqual(req['query_params'], + {'pageSize': SIZE, 'pageToken': TOKEN1}) + + def test_list_sinks_missing_key(self): + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + RETURNED = {} + # Replace the connection on the client with one of our own. 
+ CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + sinks, next_page_token = CLIENT_OBJ.list_sinks() + # Test values are correct. + self.assertEqual(len(sinks), 0) + self.assertEqual(next_page_token, None) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/sinks' % PROJECT) + self.assertEqual(req['query_params'], {}) + class _Credentials(object): From 5baba7085232b0aa2c6b592320243f0ed9fcb579 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 11 Mar 2016 20:13:33 -0500 Subject: [PATCH 23/68] Use tuple for format string arg. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1584#discussion_r55909099 --- gcloud/logging/metric.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index 8129700fce2f..ceeef3f0dd44 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -59,7 +59,7 @@ def full_name(self): @property def path(self): """URL path for the metric's APIs""" - return '/%s' % (self.full_name) + return '/%s' % (self.full_name,) def _require_client(self, client): """Check client or verify over-ride. From 9a56835a00676ce232a72f7b92c2ce12979a24f6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Mar 2016 16:09:31 -0500 Subject: [PATCH 24/68] Add 'Metrics.exists' API wrapper. --- gcloud/logging/metric.py | 21 +++++++++++++++++++++ gcloud/logging/test_metric.py | 25 +++++++++++++++++++++++++ 2 files changed, 46 insertions(+) diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index ceeef3f0dd44..72a5e36ca844 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -14,6 +14,8 @@ """Define Logging API Metrics.""" +from gcloud.exceptions import NotFound + class Metric(object): """Metrics represent named filters for log entries. @@ -93,3 +95,22 @@ def create(self, client=None): if self.description: data['description'] = self.description client.connection.api_request(method='PUT', path=self.path, data=data) + + def exists(self, client=None): + """API call: test for the existence of the metric via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. 
+ """ + client = self._require_client(client) + + try: + client.connection.api_request(method='GET', path=self.path) + except NotFound: + return False + else: + return True diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py index 453e60916aae..29c13cd96c42 100644 --- a/gcloud/logging/test_metric.py +++ b/gcloud/logging/test_metric.py @@ -93,6 +93,31 @@ def test_create_w_alternate_client(self): self.assertEqual(req['path'], '/%s' % FULL) self.assertEqual(req['data'], RESOURCE) + def test_exists_miss_w_bound_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + self.assertFalse(metric.exists()) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_exists_hit_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn1 = _Connection({'name': FULL}) + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({'name': FULL}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) + self.assertTrue(metric.exists(client=CLIENT2)) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + class _Connection(object): From b66acf562b4d6566edc716685724931606801cb2 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Mar 2016 11:30:37 -0500 Subject: [PATCH 25/68] Add 'Metrics.reload' API wrapper. --- gcloud/logging/metric.py | 15 ++++++++++++ gcloud/logging/test_metric.py | 45 ++++++++++++++++++++++++++++++++++- 2 files changed, 59 insertions(+), 1 deletion(-) diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index 72a5e36ca844..c4a358a93abb 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -114,3 +114,18 @@ def exists(self, client=None): return False else: return True + + def reload(self, client=None): + """API call: sync local metric configuration via a GET request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/get + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. 
+ """ + client = self._require_client(client) + data = client.connection.api_request(method='GET', path=self.path) + self.description = data.get('description', '') + self.filter_ = data['filter'] diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py index 29c13cd96c42..a491d4e0c845 100644 --- a/gcloud/logging/test_metric.py +++ b/gcloud/logging/test_metric.py @@ -106,7 +106,7 @@ def test_exists_miss_w_bound_client(self): def test_exists_hit_w_alternate_client(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) - conn1 = _Connection({'name': FULL}) + conn1 = _Connection() CLIENT1 = _Client(project=self.PROJECT, connection=conn1) conn2 = _Connection({'name': FULL}) CLIENT2 = _Client(project=self.PROJECT, connection=conn2) @@ -118,6 +118,49 @@ def test_exists_hit_w_alternate_client(self): self.assertEqual(req['method'], 'GET') self.assertEqual(req['path'], '/%s' % FULL) + def test_reload_w_bound_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + NEW_FILTER = 'logName:syslog AND severity>=INFO' + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': NEW_FILTER, + } + conn = _Connection(RESOURCE) + CLIENT = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT, + description=DESCRIPTION) + metric.reload() + self.assertEqual(metric.filter_, NEW_FILTER) + self.assertEqual(metric.description, '') + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_reload_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + NEW_FILTER = 'logName:syslog AND severity>=INFO' + RESOURCE = { + 'name': self.METRIC_NAME, + 'description': DESCRIPTION, + 'filter': NEW_FILTER, + } + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) + metric.reload(client=CLIENT2) + self.assertEqual(metric.filter_, NEW_FILTER) + self.assertEqual(metric.description, DESCRIPTION) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/%s' % FULL) + class _Connection(object): From 3010551166a9fe0c0fd14ffacdfc106f6e4796dd Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Mar 2016 11:53:07 -0500 Subject: [PATCH 26/68] Add 'Metrics.update' API wrapper. --- docs/logging-usage.rst | 2 -- gcloud/logging/metric.py | 16 +++++++++++++++ gcloud/logging/test_metric.py | 38 +++++++++++++++++++++++++++++++++++ 3 files changed, 54 insertions(+), 2 deletions(-) diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst index 6711bf207a0a..d0312ea9224c 100644 --- a/docs/logging-usage.rst +++ b/docs/logging-usage.rst @@ -186,8 +186,6 @@ Update a metric: >>> from gcloud import logging >>> client = logging.Client() >>> metric = client.metric("robots") - >>> metric.exists() # API call - True >>> metric.reload() # API call >>> metric.description = "Danger, Will Robinson!" 
>>> metric.update() # API call diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index c4a358a93abb..cc8d4d783329 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -129,3 +129,19 @@ def reload(self, client=None): data = client.connection.api_request(method='GET', path=self.path) self.description = data.get('description', '') self.filter_ = data['filter'] + + def update(self, client=None): + """API call: update metric configuration via a PUT request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/update + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + data = {'name': self.name, 'filter': self.filter_} + if self.description: + data['description'] = self.description + client.connection.api_request(method='PUT', path=self.path, data=data) diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py index a491d4e0c845..a592176a7084 100644 --- a/gcloud/logging/test_metric.py +++ b/gcloud/logging/test_metric.py @@ -161,6 +161,44 @@ def test_reload_w_alternate_client(self): self.assertEqual(req['method'], 'GET') self.assertEqual(req['path'], '/%s' % FULL) + def test_update_w_bound_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + RESOURCE = { + 'name': self.METRIC_NAME, + 'filter': self.FILTER, + } + conn = _Connection(RESOURCE) + CLIENT = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + metric.update() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + + def test_update_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + RESOURCE = { + 'name': self.METRIC_NAME, + 'description': DESCRIPTION, + 'filter': self.FILTER, + } + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection(RESOURCE) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1, + description=DESCRIPTION) + metric.update(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'PUT') + self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['data'], RESOURCE) + class _Connection(object): From 23feb302be05ff604ae16244c6388c1533739f3c Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 14 Mar 2016 12:57:04 -0400 Subject: [PATCH 27/68] Restore doc assertion that metric exists. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1587#discussion_r55942824 --- docs/logging-usage.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/logging-usage.rst b/docs/logging-usage.rst index d0312ea9224c..6711bf207a0a 100644 --- a/docs/logging-usage.rst +++ b/docs/logging-usage.rst @@ -186,6 +186,8 @@ Update a metric: >>> from gcloud import logging >>> client = logging.Client() >>> metric = client.metric("robots") + >>> metric.exists() # API call + True >>> metric.reload() # API call >>> metric.description = "Danger, Will Robinson!" 
>>> metric.update() # API call From 75b1e790690f090d9d7ad01e89ac9fc0d846e1d3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Mar 2016 12:05:42 -0500 Subject: [PATCH 28/68] Add 'Metrics.delete' API wrapper. --- gcloud/logging/metric.py | 13 +++++++++++++ gcloud/logging/test_metric.py | 25 +++++++++++++++++++++++++ 2 files changed, 38 insertions(+) diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index cc8d4d783329..3f5c95c3809d 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -145,3 +145,16 @@ def update(self, client=None): if self.description: data['description'] = self.description client.connection.api_request(method='PUT', path=self.path, data=data) + + def delete(self, client=None): + """API call: delete a metric via a DELETE request + + See + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/delete + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current metric. + """ + client = self._require_client(client) + client.connection.api_request(method='DELETE', path=self.path) diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py index a592176a7084..43f5b1efb2ec 100644 --- a/gcloud/logging/test_metric.py +++ b/gcloud/logging/test_metric.py @@ -199,6 +199,31 @@ def test_update_w_alternate_client(self): self.assertEqual(req['path'], '/%s' % FULL) self.assertEqual(req['data'], RESOURCE) + def test_delete_w_bound_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT) + metric.delete() + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % FULL) + + def test_delete_w_alternate_client(self): + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + conn1 = _Connection() + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + conn2 = _Connection({}) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=CLIENT1) + metric.delete(client=CLIENT2) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'DELETE') + self.assertEqual(req['path'], '/%s' % FULL) + class _Connection(object): From 04be54675d3bff91db9993ff2df496aea7c52205 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Mar 2016 13:06:08 -0500 Subject: [PATCH 29/68] Add 'Metric.from_api_repr'. 
--- gcloud/logging/metric.py | 53 ++++++++++++++++++++++++ gcloud/logging/test_metric.py | 76 +++++++++++++++++++++++++++++++++++ 2 files changed, 129 insertions(+) diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index 3f5c95c3809d..983756f05700 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -14,9 +14,39 @@ """Define Logging API Metrics.""" +import re + +from gcloud._helpers import _name_from_project_path from gcloud.exceptions import NotFound +_METRIC_TEMPLATE = re.compile(r""" + projects/ # static prefix + (?P[^/]+) # initial letter, wordchars + hyphen + /metrics/ # static midfix + (?P[^/]+) # initial letter, wordchars + allowed punc +""", re.VERBOSE) + + +def _metric_name_from_path(path, project): + """Validate a metric URI path and get the metric name. + + :type path: string + :param path: URI path for a metric API request. + + :type project: string + :param project: The project associated with the request. It is + included for validation purposes. + + :rtype: string + :returns: Metric name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + return _name_from_project_path(path, project, _METRIC_TEMPLATE) + + class Metric(object): """Metrics represent named filters for log entries. @@ -63,6 +93,29 @@ def path(self): """URL path for the metric's APIs""" return '/%s' % (self.full_name,) + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a metric given its API representation + + :type resource: dict + :param resource: metric resource representation returned from the API + + :type client: :class:`gcloud.pubsub.client.Client` + :param client: Client which holds credentials and project + configuration for the metric. + + :rtype: :class:`gcloud.logging.metric.Metric` + :returns: Metric parsed from ``resource``. + :raises: :class:`ValueError` if ``client`` is not ``None`` and the + project from the resource does not agree with the project + from the client. + """ + metric_name = _metric_name_from_path(resource['name'], client.project) + filter_ = resource['filter'] + description = resource.get('description', '') + return cls(metric_name, filter_, client=client, + description=description) + def _require_client(self, client): """Check client or verify over-ride. 
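The new ``_metric_name_from_path`` helper validates a fully-qualified metric path against the client's project and returns the trailing metric name, raising ``ValueError`` for ill-formed paths or a project mismatch. A minimal sketch of the intended behavior (``my-project`` and ``error-count`` are illustrative placeholders, not values taken from this patch)::

    >>> from gcloud.logging.metric import _metric_name_from_path
    >>> _metric_name_from_path(
    ...     'projects/my-project/metrics/error-count', 'my-project')
    'error-count'

The tests added below cover the short-path, wrong-format, and mismatched-project error cases.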
diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py index 43f5b1efb2ec..6c28fad3443c 100644 --- a/gcloud/logging/test_metric.py +++ b/gcloud/logging/test_metric.py @@ -15,6 +15,38 @@ import unittest2 +class Test__metric_name_from_path(unittest2.TestCase): + + def _callFUT(self, path, project): + from gcloud.logging.metric import _metric_name_from_path + return _metric_name_from_path(path, project) + + def test_invalid_path_length(self): + PATH = 'projects/foo' + PROJECT = None + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT) + + def test_invalid_path_format(self): + METRIC_NAME = 'METRIC_NAME' + PROJECT = 'PROJECT' + PATH = 'foo/%s/bar/%s' % (PROJECT, METRIC_NAME) + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT) + + def test_invalid_project(self): + METRIC_NAME = 'METRIC_NAME' + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + PATH = 'projects/%s/metrics/%s' % (PROJECT1, METRIC_NAME) + self.assertRaises(ValueError, self._callFUT, PATH, PROJECT2) + + def test_valid_data(self): + METRIC_NAME = 'METRIC_NAME' + PROJECT = 'PROJECT' + PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + metric_name = self._callFUT(PATH, PROJECT) + self.assertEqual(metric_name, METRIC_NAME) + + class TestMetric(unittest2.TestCase): PROJECT = 'test-project' @@ -56,6 +88,50 @@ def test_ctor_explicit(self): self.assertEqual(metric.full_name, FULL) self.assertEqual(metric.path, '/%s' % (FULL,)) + def test_from_api_repr_minimal(self): + CLIENT = _Client(project=self.PROJECT) + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + RESOURCE = { + 'name': FULL, + 'filter': self.FILTER, + } + klass = self._getTargetClass() + metric = klass.from_api_repr(RESOURCE, client=CLIENT) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, '') + self.assertTrue(metric._client is CLIENT) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + + def test_from_api_repr_w_description(self): + CLIENT = _Client(project=self.PROJECT) + FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + DESCRIPTION = 'DESCRIPTION' + RESOURCE = { + 'name': FULL, + 'filter': self.FILTER, + 'description': DESCRIPTION, + } + klass = self._getTargetClass() + metric = klass.from_api_repr(RESOURCE, client=CLIENT) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, DESCRIPTION) + self.assertTrue(metric._client is CLIENT) + self.assertEqual(metric.project, self.PROJECT) + self.assertEqual(metric.full_name, FULL) + + def test_from_api_repr_with_mismatched_project(self): + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + CLIENT = _Client(project=PROJECT1) + FULL = 'projects/%s/metrics/%s' % (PROJECT2, self.METRIC_NAME) + RESOURCE = {'name': FULL, 'filter': self.FILTER} + klass = self._getTargetClass() + self.assertRaises(ValueError, klass.from_api_repr, + RESOURCE, client=CLIENT) + def test_create_w_bound_client(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) RESOURCE = { From 42988068e14620397d888800d1043d8cf6725adf Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Mar 2016 13:16:02 -0500 Subject: [PATCH 30/68] Add 'Client.list_metrics' API wrapper. 
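The wrapper follows the same calling convention as ``Client.list_sinks``: optional ``page_size`` and ``page_token`` arguments, and a ``(metrics, next_page_token)`` return value. A sketch of paging through every metric, assuming ``client`` is an already-configured ``logging.Client`` (the page size is illustrative)::

    >>> metrics, token = client.list_metrics(page_size=50)
    >>> while token is not None:
    ...     more, token = client.list_metrics(page_size=50, page_token=token)
    ...     metrics.extend(more)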
--- gcloud/logging/client.py | 40 +++++++++++++- gcloud/logging/test_client.py | 99 +++++++++++++++++++++++++++++++++++ 2 files changed, 138 insertions(+), 1 deletion(-) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 1713d8d47538..982252e9d053 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -20,6 +20,7 @@ from gcloud.logging.entries import StructEntry from gcloud.logging.entries import TextEntry from gcloud.logging.logger import Logger +from gcloud.logging.metric import Metric from gcloud.logging.sink import Sink @@ -79,7 +80,8 @@ def _entry_from_resource(self, resource, loggers): raise ValueError('Cannot parse job resource') def list_entries(self, projects=None, filter_=None, order_by=None, - page_size=None, page_token=None): + page_size=None, + page_token=None): """Return a page of log entries. See: @@ -112,6 +114,7 @@ def list_entries(self, projects=None, filter_=None, order_by=None, more topics can be retrieved with another call (pass that value as ``page_token``). """ + # pylint: disable=too-many-branches if projects is None: projects = [self.project] @@ -190,3 +193,38 @@ def list_sinks(self, page_size=None, page_token=None): sinks = [Sink.from_api_repr(resource, self) for resource in resp.get('sinks', ())] return sinks, resp.get('nextPageToken') + + def list_metrics(self, page_size=None, page_token=None): + """List metrics for the project associated with this client. + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list + + :type page_size: int + :param page_size: maximum number of metrics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of metrics. If not + passed, the API will return the first page of + metrics. + + :rtype: tuple, (list, str) + :returns: list of :class:`gcloud.logging.metric.Metric`, plus a + "next page token" string: if not None, indicates that + more metrics can be retrieved with another call (pass that + """ + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/metrics' % (self.project,) + resp = self.connection.api_request(method='GET', path=path, + query_params=params) + metrics = [Metric.from_api_repr(resource, self) + for resource in resp.get('metrics', ())] + return metrics, resp.get('nextPageToken') diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index 94a9997e978e..e9e6d0f105e0 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -270,6 +270,105 @@ def test_list_sinks_missing_key(self): self.assertEqual(req['path'], '/projects/%s/sinks' % PROJECT) self.assertEqual(req['query_params'], {}) + def test_list_metrics_no_paging(self): + from gcloud.logging.metric import Metric + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + METRIC_NAME = 'metric_name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 'DESCRIPTION' + METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + + RETURNED = { + 'metrics': [{ + 'name': METRIC_PATH, + 'filter': FILTER, + 'description': DESCRIPTION, + }], + } + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + metrics, next_page_token = CLIENT_OBJ.list_metrics() + # Test values are correct. 
+ self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, METRIC_NAME) + self.assertEqual(metric.filter_, FILTER) + self.assertEqual(metric.description, DESCRIPTION) + self.assertEqual(next_page_token, None) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) + self.assertEqual(req['query_params'], {}) + + def test_list_metrics_with_paging(self): + from gcloud.logging.metric import Metric + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + METRIC_NAME = 'metric_name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 'DESCRIPTION' + METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + RETURNED = { + 'metrics': [{ + 'name': METRIC_PATH, + 'filter': FILTER, + 'description': DESCRIPTION, + }], + 'nextPageToken': TOKEN2, + } + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + metrics, next_page_token = CLIENT_OBJ.list_metrics(SIZE, TOKEN1) + # Test values are correct. + self.assertEqual(len(metrics), 1) + metric = metrics[0] + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, METRIC_NAME) + self.assertEqual(metric.filter_, FILTER) + self.assertEqual(metric.description, DESCRIPTION) + self.assertEqual(next_page_token, TOKEN2) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) + self.assertEqual(req['query_params'], + {'pageSize': SIZE, 'pageToken': TOKEN1}) + + def test_list_metrics_missing_key(self): + PROJECT = 'PROJECT' + CREDS = _Credentials() + + CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) + + RETURNED = {} + # Replace the connection on the client with one of our own. + CLIENT_OBJ.connection = _Connection(RETURNED) + + # Execute request. + metrics, next_page_token = CLIENT_OBJ.list_metrics() + # Test values are correct. + self.assertEqual(len(metrics), 0) + self.assertEqual(next_page_token, None) + self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) + req = CLIENT_OBJ.connection._requested[0] + self.assertEqual(req['method'], 'GET') + self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) + self.assertEqual(req['query_params'], {}) + class _Credentials(object): From 5d342faef2ec1762ddfd42e1be9dccdfdbe43c81 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 8 Mar 2016 13:28:10 -0500 Subject: [PATCH 31/68] Add 'Client.metric' factory. --- gcloud/logging/client.py | 18 ++++++++++++++ gcloud/logging/test_client.py | 47 +++++++++++++++++++++-------------- 2 files changed, 47 insertions(+), 18 deletions(-) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 982252e9d053..5f7cd9046db3 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -194,6 +194,24 @@ def list_sinks(self, page_size=None, page_token=None): for resource in resp.get('sinks', ())] return sinks, resp.get('nextPageToken') + def metric(self, name, filter_, description=''): + """Creates a metric bound to the current client. + + :type name: string + :param name: the name of the metric to be constructed. 
+ + :type filter_: string + :param filter_: the advanced logs filter expression defining the + entries tracked by the metric. + + :type description: string + :param description: the description of the metric to be constructed. + + :rtype: :class:`gcloud.pubsub.logger.Logger` + :returns: Logger created with the current client. + """ + return Metric(name, filter_, client=self, description=description) + def list_metrics(self, page_size=None, page_token=None): """List metrics for the project associated with this client. diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index e9e6d0f105e0..59bc10a177f9 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -22,6 +22,9 @@ class TestClient(unittest2.TestCase): SINK_NAME = 'SINK_NAME' FILTER = 'logName:syslog AND severity>=ERROR' DESTINATION_URI = 'faux.googleapis.com/destination' + METRIC_NAME = 'metric_name' + FILTER = 'logName:syslog AND severity>=ERROR' + DESCRIPTION = 'DESCRIPTION' def _getTargetClass(self): from gcloud.logging.client import Client @@ -270,6 +273,20 @@ def test_list_sinks_missing_key(self): self.assertEqual(req['path'], '/projects/%s/sinks' % PROJECT) self.assertEqual(req['query_params'], {}) + def test_metric(self): + from gcloud.logging.metric import Metric + creds = _Credentials() + + client_obj = self._makeOne(project=self.PROJECT, credentials=creds) + metric = client_obj.metric(self.METRIC_NAME, self.FILTER, + description=self.DESCRIPTION) + self.assertTrue(isinstance(metric, Metric)) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) + self.assertTrue(metric.client is client_obj) + self.assertEqual(metric.project, self.PROJECT) + def test_list_metrics_no_paging(self): from gcloud.logging.metric import Metric PROJECT = 'PROJECT' @@ -277,16 +294,13 @@ def test_list_metrics_no_paging(self): CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) - METRIC_NAME = 'metric_name' - FILTER = 'logName:syslog AND severity>=ERROR' - DESCRIPTION = 'DESCRIPTION' - METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, self.METRIC_NAME) RETURNED = { 'metrics': [{ 'name': METRIC_PATH, - 'filter': FILTER, - 'description': DESCRIPTION, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, }], } # Replace the connection on the client with one of our own. 
@@ -298,9 +312,9 @@ def test_list_metrics_no_paging(self): self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertTrue(isinstance(metric, Metric)) - self.assertEqual(metric.name, METRIC_NAME) - self.assertEqual(metric.filter_, FILTER) - self.assertEqual(metric.description, DESCRIPTION) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) self.assertEqual(next_page_token, None) self.assertEqual(len(CLIENT_OBJ.connection._requested), 1) req = CLIENT_OBJ.connection._requested[0] @@ -315,18 +329,15 @@ def test_list_metrics_with_paging(self): CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) - METRIC_NAME = 'metric_name' - FILTER = 'logName:syslog AND severity>=ERROR' - DESCRIPTION = 'DESCRIPTION' - METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, METRIC_NAME) + METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, self.METRIC_NAME) TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 RETURNED = { 'metrics': [{ 'name': METRIC_PATH, - 'filter': FILTER, - 'description': DESCRIPTION, + 'filter': self.FILTER, + 'description': self.DESCRIPTION, }], 'nextPageToken': TOKEN2, } @@ -339,9 +350,9 @@ def test_list_metrics_with_paging(self): self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertTrue(isinstance(metric, Metric)) - self.assertEqual(metric.name, METRIC_NAME) - self.assertEqual(metric.filter_, FILTER) - self.assertEqual(metric.description, DESCRIPTION) + self.assertEqual(metric.name, self.METRIC_NAME) + self.assertEqual(metric.filter_, self.FILTER) + self.assertEqual(metric.description, self.DESCRIPTION) self.assertEqual(next_page_token, TOKEN2) req = CLIENT_OBJ.connection._requested[0] self.assertEqual(req['path'], '/projects/%s/metrics' % PROJECT) From affbc5d3be0107de85241ce46ec46a19b8f33cec Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 14 Mar 2016 13:51:51 -0400 Subject: [PATCH 32/68] Undo extra linewrap, maybe added during conflict resolution. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1590#discussion_r56043988 --- gcloud/logging/client.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 5f7cd9046db3..7a3917eb5720 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -80,8 +80,7 @@ def _entry_from_resource(self, resource, loggers): raise ValueError('Cannot parse job resource') def list_entries(self, projects=None, filter_=None, order_by=None, - page_size=None, - page_token=None): + page_size=None, page_token=None): """Return a page of log entries. See: From 3101fb00f931027ae4d3e564112a1d666b8e1e76 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 14 Mar 2016 14:11:21 -0400 Subject: [PATCH 33/68] Drop no-longer-needed pylint disable. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1590#discussion_r56044689 --- gcloud/logging/client.py | 1 - 1 file changed, 1 deletion(-) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 7a3917eb5720..b59908e334bc 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -113,7 +113,6 @@ def list_entries(self, projects=None, filter_=None, order_by=None, more topics can be retrieved with another call (pass that value as ``page_token``). 
""" - # pylint: disable=too-many-branches if projects is None: projects = [self.project] From 02eecd1edf2a198a87b6f59709c22f6a024251bd Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 14 Mar 2016 14:12:24 -0400 Subject: [PATCH 34/68] Fix docstring copy-pasta. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1590#discussion_r56045255. --- gcloud/logging/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index b59908e334bc..8978c79b3a64 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -205,8 +205,8 @@ def metric(self, name, filter_, description=''): :type description: string :param description: the description of the metric to be constructed. - :rtype: :class:`gcloud.pubsub.logger.Logger` - :returns: Logger created with the current client. + :rtype: :class:`gcloud.pubsub.metric.Metric` + :returns: Metric created with the current client. """ return Metric(name, filter_, client=self, description=description) From f1ca149df7569d12df0c4dcaea48a24d8cbad01d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 14 Mar 2016 14:14:25 -0400 Subject: [PATCH 35/68] Re-add docstring line lost in conflict resolution. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1590#discussion_r56045365 --- gcloud/logging/client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 8978c79b3a64..0344dde22555 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -229,6 +229,7 @@ def list_metrics(self, page_size=None, page_token=None): :returns: list of :class:`gcloud.logging.metric.Metric`, plus a "next page token" string: if not None, indicates that more metrics can be retrieved with another call (pass that + value as ``page_token``). """ params = {} From 15526dfdedd846f9470083a5cec36884dfb26ba4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Mar 2016 17:32:09 -0400 Subject: [PATCH 36/68] Fix verb/target for 'Metric.create'/'Sink.create'. - Spec mandates 'POST' to the container, rather than 'PUT' to the putative URL of the entity itself. - Note that metric creation worked before; sink creation is blocked due to permission issues (see #1614). --- gcloud/logging/metric.py | 3 ++- gcloud/logging/sink.py | 3 ++- gcloud/logging/test_metric.py | 18 +++++++++--------- gcloud/logging/test_sink.py | 16 ++++++++-------- 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index 983756f05700..0e79f931e7ea 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -141,13 +141,14 @@ def create(self, client=None): ``client`` stored on the current metric. """ client = self._require_client(client) + target = '/projects/%s/metrics' % (self.project,) data = { 'name': self.name, 'filter': self.filter_, } if self.description: data['description'] = self.description - client.connection.api_request(method='PUT', path=self.path, data=data) + client.connection.api_request(method='POST', path=target, data=data) def exists(self, client=None): """API call: test for the existence of the metric via a GET request diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py index 983861e61d3a..d7fb24fa98c9 100644 --- a/gcloud/logging/sink.py +++ b/gcloud/logging/sink.py @@ -137,12 +137,13 @@ def create(self, client=None): ``client`` stored on the current sink. 
""" client = self._require_client(client) + target = '/projects/%s/sinks' % (self.project,) data = { 'name': self.name, 'filter': self.filter_, 'destination': self.destination, } - client.connection.api_request(method='PUT', path=self.path, data=data) + client.connection.api_request(method='POST', path=target, data=data) def exists(self, client=None): """API call: test for the existence of the sink via a GET request diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py index 6c28fad3443c..1d68394fe13c 100644 --- a/gcloud/logging/test_metric.py +++ b/gcloud/logging/test_metric.py @@ -133,31 +133,31 @@ def test_from_api_repr_with_mismatched_project(self): RESOURCE, client=CLIENT) def test_create_w_bound_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + TARGET = 'projects/%s/metrics' % (self.PROJECT,) RESOURCE = { 'name': self.METRIC_NAME, 'filter': self.FILTER, } - conn = _Connection({'name': FULL}) + conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client) metric.create() self.assertEqual(len(conn._requested), 1) req = conn._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % TARGET) self.assertEqual(req['data'], RESOURCE) def test_create_w_alternate_client(self): - FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) + TARGET = 'projects/%s/metrics' % (self.PROJECT,) RESOURCE = { 'name': self.METRIC_NAME, 'filter': self.FILTER, 'description': self.DESCRIPTION, } - conn1 = _Connection({'name': FULL}) + conn1 = _Connection() client1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection({'name': FULL}) + conn2 = _Connection(RESOURCE) client2 = _Client(project=self.PROJECT, connection=conn2) metric = self._makeOne(self.METRIC_NAME, self.FILTER, client=client1, description=self.DESCRIPTION) @@ -165,8 +165,8 @@ def test_create_w_alternate_client(self): self.assertEqual(len(conn1._requested), 0) self.assertEqual(len(conn2._requested), 1) req = conn2._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % TARGET) self.assertEqual(req['data'], RESOURCE) def test_exists_miss_w_bound_client(self): diff --git a/gcloud/logging/test_sink.py b/gcloud/logging/test_sink.py index e78860a772a3..103aa0ab6b8c 100644 --- a/gcloud/logging/test_sink.py +++ b/gcloud/logging/test_sink.py @@ -124,25 +124,25 @@ def test_from_api_repr_with_mismatched_project(self): RESOURCE, client=CLIENT) def test_create_w_bound_client(self): - FULL = 'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + TARGET = 'projects/%s/sinks' % (self.PROJECT,) RESOURCE = { 'name': self.SINK_NAME, 'filter': self.FILTER, 'destination': self.DESTINATION_URI, } - conn = _Connection({'name': FULL}) + conn = _Connection(RESOURCE) client = _Client(project=self.PROJECT, connection=conn) sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client) sink.create() self.assertEqual(len(conn._requested), 1) req = conn._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % TARGET) self.assertEqual(req['data'], RESOURCE) def test_create_w_alternate_client(self): - FULL = 
'projects/%s/sinks/%s' % (self.PROJECT, self.SINK_NAME) + TARGET = 'projects/%s/sinks' % (self.PROJECT,) RESOURCE = { 'name': self.SINK_NAME, 'filter': self.FILTER, @@ -150,7 +150,7 @@ def test_create_w_alternate_client(self): } conn1 = _Connection() client1 = _Client(project=self.PROJECT, connection=conn1) - conn2 = _Connection({'name': FULL}) + conn2 = _Connection(RESOURCE) client2 = _Client(project=self.PROJECT, connection=conn2) sink = self._makeOne(self.SINK_NAME, self.FILTER, self.DESTINATION_URI, client=client1) @@ -158,8 +158,8 @@ def test_create_w_alternate_client(self): self.assertEqual(len(conn1._requested), 0) self.assertEqual(len(conn2._requested), 1) req = conn2._requested[0] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['path'], '/%s' % FULL) + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/%s' % TARGET) self.assertEqual(req['data'], RESOURCE) def test_exists_miss_w_bound_client(self): From d7062b0d6e003f394ef66bc38166795da9a48e79 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Mar 2016 11:24:03 -0400 Subject: [PATCH 37/68] System test for 'logger.log_text' and 'logger.log_struct'. --- system_tests/logging_.py | 71 +++++++++++++++++++++++++++++++++ system_tests/run_system_test.py | 3 ++ 2 files changed, 74 insertions(+) create mode 100644 system_tests/logging_.py diff --git a/system_tests/logging_.py b/system_tests/logging_.py new file mode 100644 index 000000000000..9b3e99a453b7 --- /dev/null +++ b/system_tests/logging_.py @@ -0,0 +1,71 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +import unittest2 + +from gcloud import _helpers +from gcloud.environment_vars import TESTS_PROJECT +from gcloud import logging + + +DEFAULT_LOGGER_NAME = 'system-tests-%d' % (1000 * time.time(),) + + +class Config(object): + """Run-time configuration to be modified at set-up. + + This is a mutable stand-in to allow test set-up to modify + global state. 
+ """ + CLIENT = None + + +def setUpModule(): + _helpers.PROJECT = TESTS_PROJECT + Config.CLIENT = logging.Client() + + +class TestLogging(unittest2.TestCase): + + def setUp(self): + self.to_delete = [] + + def tearDown(self): + for doomed in self.to_delete: + doomed.delete() + + def test_log_text(self): + TEXT_PAYLOAD = 'System test: test_log_text' + logger = Config.CLIENT.logger(DEFAULT_LOGGER_NAME) + self.to_delete.append(logger) + logger.log_text(TEXT_PAYLOAD) + time.sleep(2) + entries, _ = logger.list_entries() + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + + def test_log_struct(self): + JSON_PAYLOAD = { + 'message': 'System test: test_log_struct', + 'weather': 'partly cloudy', + } + logger = Config.CLIENT.logger(DEFAULT_LOGGER_NAME) + self.to_delete.append(logger) + logger.log_struct(JSON_PAYLOAD) + time.sleep(2) + entries, _ = logger.list_entries() + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, JSON_PAYLOAD) diff --git a/system_tests/run_system_test.py b/system_tests/run_system_test.py index 317a4cd8ab89..553b58d29a95 100644 --- a/system_tests/run_system_test.py +++ b/system_tests/run_system_test.py @@ -22,6 +22,7 @@ import bigtable import bigtable_happybase import datastore +import logging_ import pubsub import storage import system_test_utils @@ -34,6 +35,7 @@ 'bigquery': ['project', 'credentials'], 'bigtable': ['project', 'credentials'], 'bigtable-happybase': ['project', 'credentials'], + 'logging': ['project', 'credentials'], } TEST_MODULES = { 'datastore': datastore, @@ -42,6 +44,7 @@ 'bigquery': bigquery, 'bigtable': bigtable, 'bigtable-happybase': bigtable_happybase, + 'logging': logging_, } From 97be8176973e8088a292ee43263c8ce117d7a5d3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Mar 2016 11:46:51 -0400 Subject: [PATCH 38/68] Add system test for 'Client.metric'/'Metric.create'. --- system_tests/logging_.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/system_tests/logging_.py b/system_tests/logging_.py index 9b3e99a453b7..a4072bc65ab8 100644 --- a/system_tests/logging_.py +++ b/system_tests/logging_.py @@ -21,7 +21,10 @@ from gcloud import logging -DEFAULT_LOGGER_NAME = 'system-tests-%d' % (1000 * time.time(),) +DEFAULT_LOGGER_NAME = 'system-tests-logger-%d' % (1000 * time.time(),) +DEFAULT_METRIC_NAME = 'system-tests-metric-%d' % (1000 * time.time(),) +DEFAULT_FILTER = 'logName:syslog AND severity>=INFO' +DEFAULT_DESCRIPTION = 'System testing' class Config(object): @@ -69,3 +72,11 @@ def test_log_struct(self): entries, _ = logger.list_entries() self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, JSON_PAYLOAD) + + def test_create_metric(self): + metric = Config.CLIENT.metric( + DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + metric.create() + self.to_delete.append(metric) + self.assertTrue(metric.exists()) From 61004b723e4fe0261e1580bdf4c259988a0deb75 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Mar 2016 11:57:02 -0400 Subject: [PATCH 39/68] Add system test for 'Client.list_metrics'. Adjust 'Metric.from_api_repr' and associated tests to accomodate the fact that the 'LogMetric' resource does not contain a fully-qualified path, but only the simple name. 
--- gcloud/logging/metric.py | 5 +---- gcloud/logging/test_client.py | 7 ++----- gcloud/logging/test_metric.py | 14 ++------------ system_tests/logging_.py | 14 ++++++++++++++ 4 files changed, 19 insertions(+), 21 deletions(-) diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index 0e79f931e7ea..38b7e3ad3d04 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -106,11 +106,8 @@ def from_api_repr(cls, resource, client): :rtype: :class:`gcloud.logging.metric.Metric` :returns: Metric parsed from ``resource``. - :raises: :class:`ValueError` if ``client`` is not ``None`` and the - project from the resource does not agree with the project - from the client. """ - metric_name = _metric_name_from_path(resource['name'], client.project) + metric_name = resource['name'] filter_ = resource['filter'] description = resource.get('description', '') return cls(metric_name, filter_, client=client, diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index 59bc10a177f9..fe38bd50fc4f 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -294,11 +294,9 @@ def test_list_metrics_no_paging(self): CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) - METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, self.METRIC_NAME) - RETURNED = { 'metrics': [{ - 'name': METRIC_PATH, + 'name': self.METRIC_NAME, 'filter': self.FILTER, 'description': self.DESCRIPTION, }], @@ -329,13 +327,12 @@ def test_list_metrics_with_paging(self): CLIENT_OBJ = self._makeOne(project=PROJECT, credentials=CREDS) - METRIC_PATH = 'projects/%s/metrics/%s' % (PROJECT, self.METRIC_NAME) TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 RETURNED = { 'metrics': [{ - 'name': METRIC_PATH, + 'name': self.METRIC_NAME, 'filter': self.FILTER, 'description': self.DESCRIPTION, }], diff --git a/gcloud/logging/test_metric.py b/gcloud/logging/test_metric.py index 1d68394fe13c..cbba9d1c4252 100644 --- a/gcloud/logging/test_metric.py +++ b/gcloud/logging/test_metric.py @@ -92,7 +92,7 @@ def test_from_api_repr_minimal(self): CLIENT = _Client(project=self.PROJECT) FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) RESOURCE = { - 'name': FULL, + 'name': self.METRIC_NAME, 'filter': self.FILTER, } klass = self._getTargetClass() @@ -109,7 +109,7 @@ def test_from_api_repr_w_description(self): FULL = 'projects/%s/metrics/%s' % (self.PROJECT, self.METRIC_NAME) DESCRIPTION = 'DESCRIPTION' RESOURCE = { - 'name': FULL, + 'name': self.METRIC_NAME, 'filter': self.FILTER, 'description': DESCRIPTION, } @@ -122,16 +122,6 @@ def test_from_api_repr_w_description(self): self.assertEqual(metric.project, self.PROJECT) self.assertEqual(metric.full_name, FULL) - def test_from_api_repr_with_mismatched_project(self): - PROJECT1 = 'PROJECT1' - PROJECT2 = 'PROJECT2' - CLIENT = _Client(project=PROJECT1) - FULL = 'projects/%s/metrics/%s' % (PROJECT2, self.METRIC_NAME) - RESOURCE = {'name': FULL, 'filter': self.FILTER} - klass = self._getTargetClass() - self.assertRaises(ValueError, klass.from_api_repr, - RESOURCE, client=CLIENT) - def test_create_w_bound_client(self): TARGET = 'projects/%s/metrics' % (self.PROJECT,) RESOURCE = { diff --git a/system_tests/logging_.py b/system_tests/logging_.py index a4072bc65ab8..fd9e285ba620 100644 --- a/system_tests/logging_.py +++ b/system_tests/logging_.py @@ -80,3 +80,17 @@ def test_create_metric(self): metric.create() self.to_delete.append(metric) self.assertTrue(metric.exists()) + + def test_list_metrics(self): + metric = Config.CLIENT.metric( + 
DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + before_metrics, _ = Config.CLIENT.list_metrics() + before_names = set(metric.name for metric in before_metrics) + metric.create() + self.to_delete.append(metric) + self.assertTrue(metric.exists()) + after_metrics, _ = Config.CLIENT.list_metrics() + after_names = set(metric.name for metric in after_metrics) + self.assertEqual(after_names - before_names, + set([DEFAULT_METRIC_NAME])) From e0492bd73d58349567be54f122143df128bdce32 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Mar 2016 12:08:29 -0400 Subject: [PATCH 40/68] Add system test for 'Metric.reload'. --- system_tests/logging_.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/system_tests/logging_.py b/system_tests/logging_.py index fd9e285ba620..f38d633d4ed6 100644 --- a/system_tests/logging_.py +++ b/system_tests/logging_.py @@ -94,3 +94,15 @@ def test_list_metrics(self): after_names = set(metric.name for metric in after_metrics) self.assertEqual(after_names - before_names, set([DEFAULT_METRIC_NAME])) + + def test_reload_metric(self): + metric = Config.CLIENT.metric( + DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + metric.create() + self.to_delete.append(metric) + metric.filter_ = 'logName:other' + metric.description = 'local changes' + metric.reload() + self.assertEqual(metric.filter_, DEFAULT_FILTER) + self.assertEqual(metric.description, DEFAULT_DESCRIPTION) From e3d1783eaa3414ce1db002195a38749062c78278 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Mar 2016 12:13:42 -0400 Subject: [PATCH 41/68] Add system test for 'Metric.update'. --- system_tests/logging_.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/system_tests/logging_.py b/system_tests/logging_.py index f38d633d4ed6..486ee664c34f 100644 --- a/system_tests/logging_.py +++ b/system_tests/logging_.py @@ -106,3 +106,20 @@ def test_reload_metric(self): metric.reload() self.assertEqual(metric.filter_, DEFAULT_FILTER) self.assertEqual(metric.description, DEFAULT_DESCRIPTION) + + def test_update_metric(self): + NEW_FILTER = 'logName:other' + NEW_DESCRIPTION = 'updated' + metric = Config.CLIENT.metric( + DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION) + self.assertFalse(metric.exists()) + metric.create() + self.to_delete.append(metric) + metric.filter_ = NEW_FILTER + metric.description = NEW_DESCRIPTION + metric.update() + after_metrics, _ = Config.CLIENT.list_metrics() + after_info = dict((metric.name, metric) for metric in after_metrics) + after = after_info[DEFAULT_METRIC_NAME] + self.assertEqual(after.filter_, NEW_FILTER) + self.assertEqual(after.description, NEW_DESCRIPTION) From 76a47d947742874473bc90895a37f34771d9cd92 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 15 Mar 2016 12:47:39 -0400 Subject: [PATCH 42/68] Add system test for 'Sink.create'. 
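Beyond the existence check, a created sink should also show up in a listing. A sketch of such a follow-up check, reusing the module-level ``Config.CLIENT`` and ``DEFAULT_SINK_NAME`` (this assertion is not made by the test itself)::

    >>> sinks, _ = Config.CLIENT.list_sinks()
    >>> DEFAULT_SINK_NAME in set(sink.name for sink in sinks)
    True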
--- system_tests/logging_.py | 29 +++++++++++++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/system_tests/logging_.py b/system_tests/logging_.py index 486ee664c34f..da421a6a2668 100644 --- a/system_tests/logging_.py +++ b/system_tests/logging_.py @@ -21,10 +21,13 @@ from gcloud import logging -DEFAULT_LOGGER_NAME = 'system-tests-logger-%d' % (1000 * time.time(),) -DEFAULT_METRIC_NAME = 'system-tests-metric-%d' % (1000 * time.time(),) +_MILLIS = 1000 * time.time() +DEFAULT_LOGGER_NAME = 'system-tests-logger-%d' % (_MILLIS,) +DEFAULT_METRIC_NAME = 'system-tests-metric-%d' % (_MILLIS,) +DEFAULT_SINK_NAME = 'system-tests-sink-%d' % (_MILLIS,) DEFAULT_FILTER = 'logName:syslog AND severity>=INFO' DEFAULT_DESCRIPTION = 'System testing' +BUCKET_NAME = 'gcloud-python-system-testing-%d' % (_MILLIS,) class Config(object): @@ -123,3 +126,25 @@ def test_update_metric(self): after = after_info[DEFAULT_METRIC_NAME] self.assertEqual(after.filter_, NEW_FILTER) self.assertEqual(after.description, NEW_DESCRIPTION) + + def test_create_sink_storage_bucket(self): + from gcloud import storage + BUCKET_URI = 'storage.googleapis.com/%s' % (BUCKET_NAME,) + + # Create the destination bucket, and set up the ACL to allow + # Cloud Logging to write into it. + storage_client = storage.Client() + bucket = storage_client.create_bucket(BUCKET_NAME) + self.to_delete.append(bucket) + bucket.acl.reload() + logs_group = bucket.acl.group('cloud-logs@google.com') + logs_group.grant_owner() + bucket.acl.add_entity(logs_group) + bucket.acl.save() + + sink = Config.CLIENT.sink( + DEFAULT_SINK_NAME, DEFAULT_FILTER, BUCKET_URI) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) From 7c52009f0f774432532796e882bed1bd8a0daf54 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 7 Mar 2016 11:37:16 -0500 Subject: [PATCH 43/68] Add support for parsing log entries w/ 'protoPayload' from resources. 
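As a rough sketch of what this buys callers, entries returned by ``Client.list_entries()`` can now be told apart by class; the ``isinstance`` checks below mirror the dispatch added to ``_entry_from_resource`` in this patch, and the client is assumed to be already configured for a project.

    from gcloud import logging
    from gcloud.logging.entries import ProtobufEntry, StructEntry, TextEntry

    client = logging.Client()
    entries, _ = client.list_entries()
    for entry in entries:
        if isinstance(entry, TextEntry):
            print('text: %s' % (entry.payload,))
        elif isinstance(entry, StructEntry):
            print('struct: %s' % (entry.payload,))
        elif isinstance(entry, ProtobufEntry):
            # The payload is the JSON mapping returned by the API,
            # typically including an '@type' key.
            print('proto: %s' % (entry.payload.get('@type'),))
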
We can't feasibly allow writing such entries until we have an answer for --- gcloud/logging/client.py | 6 +- gcloud/logging/entries.py | 13 +++- gcloud/logging/test_client.py | 30 ++++++++- gcloud/logging/test_entries.py | 108 +++++++++++++++++++++++++++++++++ 4 files changed, 153 insertions(+), 4 deletions(-) diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index 0344dde22555..afb7496db8a6 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -17,6 +17,7 @@ from gcloud.client import JSONClient from gcloud.logging.connection import Connection +from gcloud.logging.entries import ProtobufEntry from gcloud.logging.entries import StructEntry from gcloud.logging.entries import TextEntry from gcloud.logging.logger import Logger @@ -71,13 +72,16 @@ def _entry_from_resource(self, resource, loggers): :rtype; One of: :class:`gcloud.logging.entries.TextEntry`, :class:`gcloud.logging.entries.StructEntry`, + :class:`gcloud.logging.entries.ProtobufEntry` :returns: the entry instance, constructed via the resource """ if 'textPayload' in resource: return TextEntry.from_api_repr(resource, self, loggers) elif 'jsonPayload' in resource: return StructEntry.from_api_repr(resource, self, loggers) - raise ValueError('Cannot parse job resource') + elif 'protoPayload' in resource: + return ProtobufEntry.from_api_repr(resource, self, loggers) + raise ValueError('Cannot parse log entry resource') def list_entries(self, projects=None, filter_=None, order_by=None, page_size=None, page_token=None): diff --git a/gcloud/logging/entries.py b/gcloud/logging/entries.py index badeea2bb81b..b867bf208765 100644 --- a/gcloud/logging/entries.py +++ b/gcloud/logging/entries.py @@ -76,7 +76,7 @@ def from_api_repr(cls, resource, client, loggers=None): class TextEntry(_BaseEntry): - """Entry created via a write request with ``textPayload``. + """Entry created with ``textPayload``. See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry @@ -85,9 +85,18 @@ class TextEntry(_BaseEntry): class StructEntry(_BaseEntry): - """Entry created via a write request with ``jsonPayload``. + """Entry created with ``jsonPayload``. See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry """ _PAYLOAD_KEY = 'jsonPayload' + + +class ProtobufEntry(_BaseEntry): + """Entry created with ``protoPayload``. 
+ + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry + """ + _PAYLOAD_KEY = 'protoPayload' diff --git a/gcloud/logging/test_client.py b/gcloud/logging/test_client.py index fe38bd50fc4f..2ac27234ad6e 100644 --- a/gcloud/logging/test_client.py +++ b/gcloud/logging/test_client.py @@ -104,9 +104,11 @@ def test_list_entries_defaults(self): self.assertEqual(req['data'], SENT) def test_list_entries_explicit(self): + # pylint: disable=too-many-statements from datetime import datetime from gcloud._helpers import UTC from gcloud.logging import DESCENDING + from gcloud.logging.entries import ProtobufEntry from gcloud.logging.entries import StructEntry from gcloud.logging.logger import Logger from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos @@ -116,7 +118,10 @@ def test_list_entries_explicit(self): NOW = datetime.utcnow().replace(tzinfo=UTC) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) IID1 = 'IID1' + IID2 = 'IID2' PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD['@type'] = 'type.googleapis.com/testing.example' TOKEN = 'TOKEN' PAGE_SIZE = 42 SENT = { @@ -136,6 +141,15 @@ def test_list_entries_explicit(self): 'timestamp': TIMESTAMP, 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), + }, { + 'protoPayload': PROTO_PAYLOAD, + 'insertId': IID2, + 'resource': { + 'type': 'global', + }, + 'timestamp': TIMESTAMP, + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), }], } creds = _Credentials() @@ -144,7 +158,8 @@ def test_list_entries_explicit(self): entries, token = client.list_entries( projects=[PROJECT1, PROJECT2], filter_=FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN) - self.assertEqual(len(entries), 1) + self.assertEqual(len(entries), 2) + entry = entries[0] self.assertTrue(isinstance(entry, StructEntry)) self.assertEqual(entry.insert_id, IID1) @@ -155,6 +170,19 @@ def test_list_entries_explicit(self): self.assertEqual(logger.name, self.LOGGER_NAME) self.assertTrue(logger.client is client) self.assertEqual(logger.project, self.PROJECT) + + entry = entries[1] + self.assertTrue(isinstance(entry, ProtobufEntry)) + self.assertEqual(entry.insert_id, IID2) + self.assertEqual(entry.payload, PROTO_PAYLOAD) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + + self.assertTrue(entries[0].logger is entries[1].logger) + self.assertEqual(token, None) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py index 40815ba6ff7c..219499f3edc4 100644 --- a/gcloud/logging/test_entries.py +++ b/gcloud/logging/test_entries.py @@ -221,6 +221,114 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertTrue(entry.logger is LOGGER) +class TestProtobufEntry(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from gcloud.logging.entries import ProtobufEntry + return ProtobufEntry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + PAYLOAD = {'@type': 'type.googleapis.com/testing.example', + 'message': 'MESSAGE', 'weather': 'partly cloudy'} + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger) + self.assertEqual(entry.payload, PAYLOAD) + 
self.assertTrue(entry.logger is logger) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + + def test_ctor_explicit(self): + import datetime + PAYLOAD = {'@type': 'type.googleapis.com/testing.example', + 'message': 'MESSAGE', 'weather': 'partly cloudy'} + IID = 'IID' + TIMESTAMP = datetime.datetime.now() + logger = _Logger(self.LOGGER_NAME, self.PROJECT) + entry = self._makeOne(PAYLOAD, logger, IID, TIMESTAMP) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.logger is logger) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, TIMESTAMP) + + def test_from_api_repr_missing_data_no_loggers(self): + client = _Client(self.PROJECT) + PAYLOAD = {'@type': 'type.googleapis.com/testing.example', + 'message': 'MESSAGE', 'weather': 'partly cloudy'} + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'protoPayload': PAYLOAD, + 'logName': LOG_NAME, + } + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client) + self.assertEqual(entry.payload, PAYLOAD) + self.assertTrue(entry.insert_id is None) + self.assertTrue(entry.timestamp is None) + logger = entry.logger + self.assertTrue(isinstance(logger, _Logger)) + self.assertTrue(logger.client is client) + self.assertEqual(logger.name, self.LOGGER_NAME) + + def test_from_api_repr_w_loggers_no_logger_match(self): + from datetime import datetime + from gcloud._helpers import UTC + client = _Client(self.PROJECT) + PAYLOAD = {'@type': 'type.googleapis.com/testing.example', + 'message': 'MESSAGE', 'weather': 'partly cloudy'} + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'protoPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + } + loggers = {} + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + logger = entry.logger + self.assertTrue(isinstance(logger, _Logger)) + self.assertTrue(logger.client is client) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertEqual(loggers, {LOG_NAME: logger}) + + def test_from_api_repr_w_loggers_w_logger_match(self): + from datetime import datetime + from gcloud._helpers import UTC + client = _Client(self.PROJECT) + PAYLOAD = {'@type': 'type.googleapis.com/testing.example', + 'message': 'MESSAGE', 'weather': 'partly cloudy'} + IID = 'IID' + NOW = datetime.utcnow().replace(tzinfo=UTC) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + API_REPR = { + 'protoPayload': PAYLOAD, + 'logName': LOG_NAME, + 'insertId': IID, + 'timestamp': TIMESTAMP, + } + LOGGER = object() + loggers = {LOG_NAME: LOGGER} + klass = self._getTargetClass() + entry = klass.from_api_repr(API_REPR, client, loggers=loggers) + self.assertEqual(entry.payload, PAYLOAD) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertTrue(entry.logger is LOGGER) + + def _datetime_to_rfc3339_w_nanos(value): from gcloud._helpers import _RFC3339_NO_FRACTION no_fraction = value.strftime(_RFC3339_NO_FRACTION) From fd8965c5938906f76472d746262503ecf8d9945b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 22 Mar 2016 11:24:15 -0400 Subject: [PATCH 44/68] Test '_BaseEntry' 
directly, drop tests for methodless subclasses. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1578#issuecomment-199583452 --- gcloud/logging/test_entries.py | 227 ++------------------------------- 1 file changed, 10 insertions(+), 217 deletions(-) diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py index 219499f3edc4..17136f2559d0 100644 --- a/gcloud/logging/test_entries.py +++ b/gcloud/logging/test_entries.py @@ -15,14 +15,18 @@ import unittest2 -class TestTextEntry(unittest2.TestCase): +class Test_BaseEntry(unittest2.TestCase): PROJECT = 'PROJECT' LOGGER_NAME = 'LOGGER_NAME' def _getTargetClass(self): - from gcloud.logging.entries import TextEntry - return TextEntry + from gcloud.logging.entries import _BaseEntry + + class _Dummy(_BaseEntry): + _PAYLOAD_KEY = 'dummyPayload' + + return _Dummy def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) @@ -53,7 +57,7 @@ def test_from_api_repr_missing_data_no_loggers(self): PAYLOAD = 'PAYLOAD' LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) API_REPR = { - 'textPayload': PAYLOAD, + 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, } klass = self._getTargetClass() @@ -76,7 +80,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) API_REPR = { - 'textPayload': PAYLOAD, + 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, @@ -103,218 +107,7 @@ def test_from_api_repr_w_loggers_w_logger_match(self): TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) API_REPR = { - 'textPayload': PAYLOAD, - 'logName': LOG_NAME, - 'insertId': IID, - 'timestamp': TIMESTAMP, - } - LOGGER = object() - loggers = {LOG_NAME: LOGGER} - klass = self._getTargetClass() - entry = klass.from_api_repr(API_REPR, client, loggers=loggers) - self.assertEqual(entry.payload, PAYLOAD) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - self.assertTrue(entry.logger is LOGGER) - - -class TestStructEntry(unittest2.TestCase): - - PROJECT = 'PROJECT' - LOGGER_NAME = 'LOGGER_NAME' - - def _getTargetClass(self): - from gcloud.logging.entries import StructEntry - return StructEntry - - def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) - - def test_ctor_defaults(self): - PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} - logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._makeOne(PAYLOAD, logger) - self.assertEqual(entry.payload, PAYLOAD) - self.assertTrue(entry.logger is logger) - self.assertTrue(entry.insert_id is None) - self.assertTrue(entry.timestamp is None) - - def test_ctor_explicit(self): - import datetime - PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} - IID = 'IID' - TIMESTAMP = datetime.datetime.now() - logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._makeOne(PAYLOAD, logger, IID, TIMESTAMP) - self.assertEqual(entry.payload, PAYLOAD) - self.assertTrue(entry.logger is logger) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, TIMESTAMP) - - def test_from_api_repr_missing_data_no_loggers(self): - client = _Client(self.PROJECT) - PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - API_REPR = { - 'jsonPayload': PAYLOAD, - 'logName': LOG_NAME, - } - klass = 
self._getTargetClass() - entry = klass.from_api_repr(API_REPR, client) - self.assertEqual(entry.payload, PAYLOAD) - self.assertTrue(entry.insert_id is None) - self.assertTrue(entry.timestamp is None) - logger = entry.logger - self.assertTrue(isinstance(logger, _Logger)) - self.assertTrue(logger.client is client) - self.assertEqual(logger.name, self.LOGGER_NAME) - - def test_from_api_repr_w_loggers_no_logger_match(self): - from datetime import datetime - from gcloud._helpers import UTC - client = _Client(self.PROJECT) - PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} - IID = 'IID' - NOW = datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - API_REPR = { - 'jsonPayload': PAYLOAD, - 'logName': LOG_NAME, - 'insertId': IID, - 'timestamp': TIMESTAMP, - } - loggers = {} - klass = self._getTargetClass() - entry = klass.from_api_repr(API_REPR, client, loggers=loggers) - self.assertEqual(entry.payload, PAYLOAD) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - logger = entry.logger - self.assertTrue(isinstance(logger, _Logger)) - self.assertTrue(logger.client is client) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertEqual(loggers, {LOG_NAME: logger}) - - def test_from_api_repr_w_loggers_w_logger_match(self): - from datetime import datetime - from gcloud._helpers import UTC - client = _Client(self.PROJECT) - PAYLOAD = {'message': 'MESSAGE', 'weather': 'partly cloudy'} - IID = 'IID' - NOW = datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - API_REPR = { - 'jsonPayload': PAYLOAD, - 'logName': LOG_NAME, - 'insertId': IID, - 'timestamp': TIMESTAMP, - } - LOGGER = object() - loggers = {LOG_NAME: LOGGER} - klass = self._getTargetClass() - entry = klass.from_api_repr(API_REPR, client, loggers=loggers) - self.assertEqual(entry.payload, PAYLOAD) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - self.assertTrue(entry.logger is LOGGER) - - -class TestProtobufEntry(unittest2.TestCase): - - PROJECT = 'PROJECT' - LOGGER_NAME = 'LOGGER_NAME' - - def _getTargetClass(self): - from gcloud.logging.entries import ProtobufEntry - return ProtobufEntry - - def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) - - def test_ctor_defaults(self): - PAYLOAD = {'@type': 'type.googleapis.com/testing.example', - 'message': 'MESSAGE', 'weather': 'partly cloudy'} - logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._makeOne(PAYLOAD, logger) - self.assertEqual(entry.payload, PAYLOAD) - self.assertTrue(entry.logger is logger) - self.assertTrue(entry.insert_id is None) - self.assertTrue(entry.timestamp is None) - - def test_ctor_explicit(self): - import datetime - PAYLOAD = {'@type': 'type.googleapis.com/testing.example', - 'message': 'MESSAGE', 'weather': 'partly cloudy'} - IID = 'IID' - TIMESTAMP = datetime.datetime.now() - logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._makeOne(PAYLOAD, logger, IID, TIMESTAMP) - self.assertEqual(entry.payload, PAYLOAD) - self.assertTrue(entry.logger is logger) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, TIMESTAMP) - - def test_from_api_repr_missing_data_no_loggers(self): - client = _Client(self.PROJECT) - PAYLOAD = {'@type': 'type.googleapis.com/testing.example', - 'message': 'MESSAGE', 'weather': 'partly 
cloudy'} - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - API_REPR = { - 'protoPayload': PAYLOAD, - 'logName': LOG_NAME, - } - klass = self._getTargetClass() - entry = klass.from_api_repr(API_REPR, client) - self.assertEqual(entry.payload, PAYLOAD) - self.assertTrue(entry.insert_id is None) - self.assertTrue(entry.timestamp is None) - logger = entry.logger - self.assertTrue(isinstance(logger, _Logger)) - self.assertTrue(logger.client is client) - self.assertEqual(logger.name, self.LOGGER_NAME) - - def test_from_api_repr_w_loggers_no_logger_match(self): - from datetime import datetime - from gcloud._helpers import UTC - client = _Client(self.PROJECT) - PAYLOAD = {'@type': 'type.googleapis.com/testing.example', - 'message': 'MESSAGE', 'weather': 'partly cloudy'} - IID = 'IID' - NOW = datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - API_REPR = { - 'protoPayload': PAYLOAD, - 'logName': LOG_NAME, - 'insertId': IID, - 'timestamp': TIMESTAMP, - } - loggers = {} - klass = self._getTargetClass() - entry = klass.from_api_repr(API_REPR, client, loggers=loggers) - self.assertEqual(entry.payload, PAYLOAD) - self.assertEqual(entry.insert_id, IID) - self.assertEqual(entry.timestamp, NOW) - logger = entry.logger - self.assertTrue(isinstance(logger, _Logger)) - self.assertTrue(logger.client is client) - self.assertEqual(logger.name, self.LOGGER_NAME) - self.assertEqual(loggers, {LOG_NAME: logger}) - - def test_from_api_repr_w_loggers_w_logger_match(self): - from datetime import datetime - from gcloud._helpers import UTC - client = _Client(self.PROJECT) - PAYLOAD = {'@type': 'type.googleapis.com/testing.example', - 'message': 'MESSAGE', 'weather': 'partly cloudy'} - IID = 'IID' - NOW = datetime.utcnow().replace(tzinfo=UTC) - TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) - LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) - API_REPR = { - 'protoPayload': PAYLOAD, + 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, From 5d48c651d1e927d693493fa1a6e1bb9fa547f496 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 25 Mar 2016 11:08:23 -0400 Subject: [PATCH 45/68] Add 'Logger.log_proto'. See: #1577. --- gcloud/logging/logger.py | 35 +++++++++++++++++++++++++- gcloud/logging/test_logger.py | 47 +++++++++++++++++++++++++++++++++++ 2 files changed, 81 insertions(+), 1 deletion(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index f071802bd5c9..26323f328811 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -14,6 +14,10 @@ """Define API Loggers.""" +import json + +from google.protobuf.json_format import MessageToJson + class Logger(object): """Loggers represent named targets for log entries. 
@@ -89,7 +93,7 @@ def log_text(self, text, client=None): method='POST', path='/entries:write', data=data) def log_struct(self, info, client=None): - """API call: log a text message via a POST request + """API call: log a structured message via a POST request See: https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write @@ -115,6 +119,35 @@ def log_struct(self, info, client=None): client.connection.api_request( method='POST', path='/entries:write', data=data) + def log_proto(self, message, client=None): + """API call: log a protobuf message via a POST request + + See: + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/write + + :type message: Protobuf message + :param message: the message to be logged + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current logger. + """ + client = self._require_client(client) + as_json_str = MessageToJson(message) + as_json = json.loads(as_json_str) + + data = { + 'entries': [{ + 'logName': self.full_name, + 'protoPayload': as_json, + 'resource': { + 'type': 'global', + }, + }], + } + client.connection.api_request( + method='POST', path='/entries:write', data=data) + def delete(self, client=None): """API call: delete all entries in a logger via a DELETE request diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 920233aaeab4..f17cfe85613e 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -127,6 +127,53 @@ def test_log_struct_w_explicit_client(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) + def test_log_proto_w_implicit_client(self): + from google.protobuf.unittest_pb2 import BoolMessage + MESSAGE = BoolMessage(data=True) + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + logger.log_proto(MESSAGE) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': {'data': True}, + 'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_proto_w_explicit_client(self): + from google.protobuf.unittest_pb2 import BoolMessage + MESSAGE = BoolMessage(data=True) + conn = _Connection({}) + client1 = _Client(self.PROJECT, object()) + client2 = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client1) + logger.log_proto(MESSAGE, client=client2) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': {'data': True}, + 'resource': { + 'type': 'global', + }, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + def test_delete_w_bound_client(self): PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) conn = _Connection({}) From f6e9de84ec34fea50ee2f000ff6438640c1d1d31 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 25 Mar 2016 11:20:55 -0400 Subject: [PATCH 46/68] Add 'ProtobufEntry.parse_message' helper. Closes: #1577. 
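A minimal offline sketch of what the helper does, using ``google.protobuf.Struct`` as the payload type so it runs without any API calls; in real use the payload mapping comes from an entry returned by ``Client.list_entries()``, and parsing only succeeds when the chosen message class matches the stored payload.

    import json

    from google.protobuf.json_format import MessageToJson
    from google.protobuf.struct_pb2 import Struct, Value

    from gcloud.logging.entries import ProtobufEntry

    # Stand-in for the payload mapping of an entry fetched from the API.
    payload = json.loads(
        MessageToJson(Struct(fields={'foo': Value(bool_value=True)})))
    entry = ProtobufEntry(payload, logger=None)

    message = Struct()
    entry.parse_message(message)              # fills 'message' in place
    print(message.fields['foo'].bool_value)   # True
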
--- gcloud/logging/entries.py | 12 ++++++++++++ gcloud/logging/test_entries.py | 23 +++++++++++++++++++++++ 2 files changed, 35 insertions(+) diff --git a/gcloud/logging/entries.py b/gcloud/logging/entries.py index b867bf208765..e97d5cb39bb4 100644 --- a/gcloud/logging/entries.py +++ b/gcloud/logging/entries.py @@ -14,6 +14,10 @@ """Log entries within the Google Cloud Logging API.""" +import json + +from google.protobuf.json_format import Parse + from gcloud._helpers import _rfc3339_nanos_to_datetime from gcloud.logging._helpers import logger_name_from_path @@ -100,3 +104,11 @@ class ProtobufEntry(_BaseEntry): https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/LogEntry """ _PAYLOAD_KEY = 'protoPayload' + + def parse_message(self, message): + """Parse payload into a protobuf message. + + :type message: Protobuf message + :param message: the message to be logged + """ + Parse(json.dumps(self.payload), message) diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py index 17136f2559d0..bef7b6ef6d12 100644 --- a/gcloud/logging/test_entries.py +++ b/gcloud/logging/test_entries.py @@ -122,6 +122,29 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertTrue(entry.logger is LOGGER) +class TestProtobufEntry(unittest2.TestCase): + + PROJECT = 'PROJECT' + LOGGER_NAME = 'LOGGER_NAME' + + def _getTargetClass(self): + from gcloud.logging.entries import ProtobufEntry + return ProtobufEntry + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_message(self): + from google.protobuf.unittest_pb2 import BoolMessage + LOGGER = object() + PAYLOAD = {'data': True} + message = BoolMessage(data=False) + self.assertFalse(message.data) + entry = self._makeOne(PAYLOAD, LOGGER) + entry.parse_message(message) + self.assertTrue(message.data) + + def _datetime_to_rfc3339_w_nanos(value): from gcloud._helpers import _RFC3339_NO_FRACTION no_fraction = value.strftime(_RFC3339_NO_FRACTION) From 1f1ecad997e46bc67ee9cfcf5bf023a397bf39fd Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 24 Mar 2016 16:08:34 -0400 Subject: [PATCH 47/68] Add system test for 'Sink.create' using a Bigquery dataset. --- system_tests/logging_.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/system_tests/logging_.py b/system_tests/logging_.py index da421a6a2668..fdef82679479 100644 --- a/system_tests/logging_.py +++ b/system_tests/logging_.py @@ -28,6 +28,7 @@ DEFAULT_FILTER = 'logName:syslog AND severity>=INFO' DEFAULT_DESCRIPTION = 'System testing' BUCKET_NAME = 'gcloud-python-system-testing-%d' % (_MILLIS,) +DATASET_NAME = 'system_testing_dataset_%d' % (_MILLIS,) class Config(object): @@ -148,3 +149,29 @@ def test_create_sink_storage_bucket(self): sink.create() self.to_delete.append(sink) self.assertTrue(sink.exists()) + + def test_create_sink_bigquery_dataset(self): + from gcloud import bigquery + from gcloud.bigquery.dataset import AccessGrant + DATASET_URI = 'bigquery.googleapis.com/projects/%s/datasets/%s' % ( + Config.CLIENT.project, DATASET_NAME,) + + # Create the destination dataset, and set up the ACL to allow + # Cloud Logging to write into it. 
+ bigquery_client = bigquery.Client() + dataset = bigquery_client.dataset(DATASET_NAME) + dataset.create() + self.to_delete.append(dataset) + dataset.reload() + grants = dataset.access_grants + grants.append(AccessGrant( + 'WRITER', 'groupByEmail', 'cloud-logs@google.com')) + dataset.access_grants = grants + dataset.update() + + sink = Config.CLIENT.sink( + DEFAULT_SINK_NAME, DEFAULT_FILTER, DATASET_URI) + self.assertFalse(sink.exists()) + sink.create() + self.to_delete.append(sink) + self.assertTrue(sink.exists()) From 2de3e0a2c8590a74d7c4f8ff91e210d93e153642 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 25 Mar 2016 13:41:26 -0400 Subject: [PATCH 48/68] Drop use of 'google.protobuf.unittest_pb2.BoolMessage'. See: https://github.com/google/protobuf/issues/1352. --- gcloud/logging/test_entries.py | 14 ++++++++------ gcloud/logging/test_logger.py | 20 ++++++++++++-------- 2 files changed, 20 insertions(+), 14 deletions(-) diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py index bef7b6ef6d12..4505c7655ff6 100644 --- a/gcloud/logging/test_entries.py +++ b/gcloud/logging/test_entries.py @@ -134,15 +134,17 @@ def _getTargetClass(self): def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) - def test_message(self): - from google.protobuf.unittest_pb2 import BoolMessage + def test_parse_message(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value LOGGER = object() - PAYLOAD = {'data': True} - message = BoolMessage(data=False) - self.assertFalse(message.data) + message = Struct(fields={'foo': Value(bool_value=False)}) + with_true = Struct(fields={'foo': Value(bool_value=True)}) + PAYLOAD = json.loads(MessageToJson(with_true)) entry = self._makeOne(PAYLOAD, LOGGER) entry.parse_message(message) - self.assertTrue(message.data) + self.assertTrue(message.fields['foo']) def _datetime_to_rfc3339_w_nanos(value): diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index f17cfe85613e..1c345f713f3e 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -128,19 +128,21 @@ def test_log_struct_w_explicit_client(self): self.assertEqual(req['data'], SENT) def test_log_proto_w_implicit_client(self): - from google.protobuf.unittest_pb2 import BoolMessage - MESSAGE = BoolMessage(data=True) + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) conn = _Connection({}) client = _Client(self.PROJECT, conn) logger = self._makeOne(self.LOGGER_NAME, client=client) - logger.log_proto(MESSAGE) + logger.log_proto(message) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] SENT = { 'entries': [{ 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), - 'protoPayload': {'data': True}, + 'protoPayload': json.loads(MessageToJson(message)), 'resource': { 'type': 'global', }, @@ -151,20 +153,22 @@ def test_log_proto_w_implicit_client(self): self.assertEqual(req['data'], SENT) def test_log_proto_w_explicit_client(self): - from google.protobuf.unittest_pb2 import BoolMessage - MESSAGE = BoolMessage(data=True) + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) conn = _Connection({}) client1 = _Client(self.PROJECT, object()) client2 = _Client(self.PROJECT, conn) 
logger = self._makeOne(self.LOGGER_NAME, client=client1) - logger.log_proto(MESSAGE, client=client2) + logger.log_proto(message, client=client2) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] SENT = { 'entries': [{ 'logName': 'projects/%s/logs/%s' % ( self.PROJECT, self.LOGGER_NAME), - 'protoPayload': {'data': True}, + 'protoPayload': json.loads(MessageToJson(message)), 'resource': { 'type': 'global', }, From e9a01eaa9054af218948acebd10662042cf6b1c0 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 25 Mar 2016 23:49:59 -0400 Subject: [PATCH 49/68] Document 'ProtobufEntry.parse_message' mutates passed-in message in place. Addreses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1661#discussion_r57493555 --- gcloud/logging/entries.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gcloud/logging/entries.py b/gcloud/logging/entries.py index e97d5cb39bb4..d94d7d984a1a 100644 --- a/gcloud/logging/entries.py +++ b/gcloud/logging/entries.py @@ -108,6 +108,8 @@ class ProtobufEntry(_BaseEntry): def parse_message(self, message): """Parse payload into a protobuf message. + Mutates the passed-in ``message`` in place. + :type message: Protobuf message :param message: the message to be logged """ From 159d8a3b449301f6d1562911b2c79e43f4dfbae4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 10:11:44 -0400 Subject: [PATCH 50/68] Add 'logger.Batch' for logging multiple entries via a single API call. Can be used as a context manager. See: #1565. --- gcloud/logging/logger.py | 95 ++++++++++++++ gcloud/logging/test_logger.py | 240 ++++++++++++++++++++++++++++++++++ 2 files changed, 335 insertions(+) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 26323f328811..2ddbc94c7839 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -65,6 +65,19 @@ def _require_client(self, client): client = self._client return client + def batch(self, client=None): + """Return a batch to use as a context manager. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: :class:`Batch` + :returns: A batch to use as a context manager. + """ + client = self._require_client(client) + return Batch(self, client) + def log_text(self, text, client=None): """API call: log a text message via a POST request @@ -204,3 +217,85 @@ def list_entries(self, projects=None, filter_=None, order_by=None, return self.client.list_entries( projects=projects, filter_=filter_, order_by=order_by, page_size=page_size, page_token=page_token) + + +class Batch(object): + """Context manager: collect entries to log via a single API call. + + Helper returned by :meth:`Logger.batch` + + :type logger: :class:`gcloud.logging.logger.Logger` + :param logger: the logger to which entries will be logged. + + :type client: :class:`gcloud.logging.client.Client` + :param client: The client to use. + """ + def __init__(self, logger, client): + self.logger = logger + self.entries = [] + self.client = client + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + self.commit() + + def log_text(self, text): + """Add a text entry to be logged during :meth:`commit`. + + :type text: string + :param text: the text entry + """ + self.entries.append(('text', text)) + + def log_struct(self, info): + """Add a struct entry to be logged during :meth:`commit`. 
+ + :type info: dict + :param info: the struct entry + """ + self.entries.append(('struct', info)) + + def log_proto(self, message): + """Add a protobuf entry to be logged during :meth:`commit`. + + :type message: protobuf message + :param message: the protobuf entry + """ + self.entries.append(('proto', message)) + + def commit(self, client=None): + """Send saved log entries as a single API call. + + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current batch. + """ + if client is None: + client = self.client + data = {} + entries = data['entries'] = [] + for entry_type, entry in self.entries: + info = { + 'logName': self.logger.path, + 'resource': { + 'type': 'global', + }, + } + if entry_type == 'text': + info['textPayload'] = entry + elif entry_type == 'struct': + info['structPayload'] = entry + elif entry_type == 'proto': + as_json_str = MessageToJson(entry) + as_json = json.loads(as_json_str) + info['protoPayload'] = as_json + else: # pragma: NO COVER + raise ValueError('Unknown entry type: %s' % (entry_type,)) + entries.append(info) + + client.connection.api_request( + method='POST', path='/entries:write', data=data) + del self.entries[:] diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 1c345f713f3e..44b088b9ac63 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -37,6 +37,28 @@ def test_ctor(self): self.assertEqual(logger.full_name, 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)) + def test_batch_w_bound_client(self): + from gcloud.logging.logger import Batch + conn = _Connection() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client) + batch = logger.batch() + self.assertTrue(isinstance(batch, Batch)) + self.assertTrue(batch.logger is logger) + self.assertTrue(batch.client is client) + + def test_batch_w_alternate_client(self): + from gcloud.logging.logger import Batch + conn1 = _Connection() + conn2 = _Connection() + client1 = _Client(self.PROJECT, conn1) + client2 = _Client(self.PROJECT, conn2) + logger = self._makeOne(self.LOGGER_NAME, client=client1) + batch = logger.batch(client2) + self.assertTrue(isinstance(batch, Batch)) + self.assertTrue(batch.logger is logger) + self.assertTrue(batch.client is client2) + def test_log_text_w_str_implicit_client(self): TEXT = 'TEXT' conn = _Connection({}) @@ -246,6 +268,220 @@ def test_list_entries_explicit(self): self.assertEqual(client._listed, LISTED) +class TestBatch(unittest2.TestCase): + + PROJECT = 'test-project' + + def _getTargetClass(self): + from gcloud.logging.logger import Batch + return Batch + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_ctor_defaults(self): + logger = _Logger() + CLIENT = _Client(project=self.PROJECT) + batch = self._makeOne(logger, CLIENT) + self.assertTrue(batch.logger is logger) + self.assertTrue(batch.client is CLIENT) + self.assertEqual(len(batch.entries), 0) + + def test_log_text(self): + TEXT = 'This is the entry text' + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_text(TEXT) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, [('text', TEXT)]) + + def test_log_struct(self): + STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} + connection = 
_Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_struct(STRUCT) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, [('struct', STRUCT)]) + + def test_log_proto(self): + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_proto(message) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, [('proto', message)]) + + def test_commit_w_bound_client(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + logger = _Logger() + SENT = { + 'entries': [{ + 'logName': logger.path, + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + } + }, { + 'logName': logger.path, + 'structPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + }, { + 'logName': logger.path, + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + }], + } + batch = self._makeOne(logger, client=CLIENT) + batch.log_text(TEXT) + batch.log_struct(STRUCT) + batch.log_proto(message) + batch.commit() + self.assertEqual(list(batch.entries), []) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_commit_w_alternate_client(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + conn1 = _Connection() + conn2 = _Connection({}) + CLIENT1 = _Client(project=self.PROJECT, connection=conn1) + CLIENT2 = _Client(project=self.PROJECT, connection=conn2) + logger = _Logger() + SENT = { + 'entries': [{ + 'logName': logger.path, + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + } + }, { + 'logName': logger.path, + 'structPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + }, { + 'logName': logger.path, + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + }], + } + batch = self._makeOne(logger, client=CLIENT1) + batch.log_text(TEXT) + batch.log_struct(STRUCT) + batch.log_proto(message) + batch.commit(client=CLIENT2) + self.assertEqual(list(batch.entries), []) + self.assertEqual(len(conn1._requested), 0) + self.assertEqual(len(conn2._requested), 1) + req = conn2._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_context_mgr_success(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + conn = _Connection({}) + CLIENT = 
_Client(project=self.PROJECT, connection=conn) + logger = _Logger() + SENT = { + 'entries': [{ + 'logName': logger.path, + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + } + }, { + 'logName': logger.path, + 'structPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + }, { + 'logName': logger.path, + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + }], + } + batch = self._makeOne(logger, client=CLIENT) + + with batch as other: + other.log_text(TEXT) + other.log_struct(STRUCT) + other.log_proto(message) + + self.assertEqual(list(batch.entries), []) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_context_mgr_failure(self): + from google.protobuf.struct_pb2 import Struct, Value + TEXT = 'This is the entry text' + STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + message = Struct(fields={'foo': Value(bool_value=True)}) + conn = _Connection({}) + CLIENT = _Client(project=self.PROJECT, connection=conn) + logger = _Logger() + UNSENT = [('text', TEXT), ('struct', STRUCT), ('proto', message)] + batch = self._makeOne(logger, client=CLIENT) + + try: + with batch as other: + other.log_text(TEXT) + other.log_struct(STRUCT) + other.log_proto(message) + raise _Bugout() + except _Bugout: + pass + + self.assertEqual(list(batch.entries), UNSENT) + self.assertEqual(len(conn._requested), 0) + + +class _Logger(object): + + def __init__(self, name="NAME", project="PROJECT"): + self.path = '/projects/%s/logs/%s' % (project, name) + + class _Connection(object): def __init__(self, *responses): @@ -270,3 +506,7 @@ def __init__(self, project, connection=None): def list_entries(self, **kw): self._listed = kw return self._entries, self._token + + +class _Bugout(Exception): + pass From 145ec60a80aad9de01b6b2e12091ec602854e468 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 12:04:30 -0400 Subject: [PATCH 51/68] Move repeated 'logName'/'resource' element to wrapper. 
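For reference, a short sketch of how the batch added in the previous patch is meant to be used, with a placeholder logger name: entries are buffered locally and sent by ``commit()`` in one ``entries:write`` call, which the context manager issues on a clean exit.

    from gcloud import logging
    from google.protobuf.struct_pb2 import Struct, Value

    client = logging.Client()
    logger = client.logger('example-log')   # placeholder name

    with logger.batch() as batch:
        batch.log_text('A text entry')
        batch.log_struct({'message': 'A struct entry',
                          'weather': 'partly cloudy'})
        batch.log_proto(Struct(fields={'foo': Value(bool_value=True)}))
    # commit() has run here; a single POST carried all three entries.
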
Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1665#discussion_r57584578 --- gcloud/logging/logger.py | 17 +++----- gcloud/logging/test_logger.py | 82 +++++++++++------------------------ 2 files changed, 32 insertions(+), 67 deletions(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 2ddbc94c7839..c824d4dbda68 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -275,23 +275,20 @@ def commit(self, client=None): """ if client is None: client = self.client - data = {} + data = { + 'logName': self.logger.path, + 'resource': {'type': 'global'}, + } entries = data['entries'] = [] for entry_type, entry in self.entries: - info = { - 'logName': self.logger.path, - 'resource': { - 'type': 'global', - }, - } if entry_type == 'text': - info['textPayload'] = entry + info = {'textPayload': entry} elif entry_type == 'struct': - info['structPayload'] = entry + info = {'structPayload': entry} elif entry_type == 'proto': as_json_str = MessageToJson(entry) as_json = json.loads(as_json_str) - info['protoPayload'] = as_json + info = {'protoPayload': as_json} else: # pragma: NO COVER raise ValueError('Unknown entry type: %s' % (entry_type,)) entries.append(info) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 44b088b9ac63..679ee2cb9640 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -329,25 +329,15 @@ def test_commit_w_bound_client(self): CLIENT = _Client(project=self.PROJECT, connection=conn) logger = _Logger() SENT = { - 'entries': [{ - 'logName': logger.path, - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - } - }, { - 'logName': logger.path, - 'structPayload': STRUCT, - 'resource': { - 'type': 'global', - }, - }, { - 'logName': logger.path, - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - }, - }], + 'logName': logger.path, + 'resource': { + 'type': 'global', + }, + 'entries': [ + {'textPayload': TEXT}, + {'structPayload': STRUCT}, + {'protoPayload': json.loads(MessageToJson(message))}, + ], } batch = self._makeOne(logger, client=CLIENT) batch.log_text(TEXT) @@ -374,25 +364,13 @@ def test_commit_w_alternate_client(self): CLIENT2 = _Client(project=self.PROJECT, connection=conn2) logger = _Logger() SENT = { - 'entries': [{ - 'logName': logger.path, - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - } - }, { - 'logName': logger.path, - 'structPayload': STRUCT, - 'resource': { - 'type': 'global', - }, - }, { - 'logName': logger.path, - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - }, - }], + 'logName': logger.path, + 'resource': {'type': 'global'}, + 'entries': [ + {'textPayload': TEXT}, + {'structPayload': STRUCT}, + {'protoPayload': json.loads(MessageToJson(message))}, + ], } batch = self._makeOne(logger, client=CLIENT1) batch.log_text(TEXT) @@ -418,25 +396,15 @@ def test_context_mgr_success(self): CLIENT = _Client(project=self.PROJECT, connection=conn) logger = _Logger() SENT = { - 'entries': [{ - 'logName': logger.path, - 'textPayload': TEXT, - 'resource': { - 'type': 'global', - } - }, { - 'logName': logger.path, - 'structPayload': STRUCT, - 'resource': { - 'type': 'global', - }, - }, { - 'logName': logger.path, - 'protoPayload': json.loads(MessageToJson(message)), - 'resource': { - 'type': 'global', - }, - }], + 'logName': logger.path, + 'resource': { + 'type': 'global', + }, + 'entries': [ + {'textPayload': TEXT}, + {'structPayload': STRUCT}, + {'protoPayload': 
json.loads(MessageToJson(message))}, + ], } batch = self._makeOne(logger, client=CLIENT) From 9a9a3769f83118e6c83e720a61dcfd57510387aa Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 12:37:08 -0400 Subject: [PATCH 52/68] Add coverage for can't-get-here 'else' clause. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1665#discussion_r57588502 --- gcloud/logging/logger.py | 2 +- gcloud/logging/test_logger.py | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index c824d4dbda68..3aea0585d83a 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -289,7 +289,7 @@ def commit(self, client=None): as_json_str = MessageToJson(entry) as_json = json.loads(as_json_str) info = {'protoPayload': as_json} - else: # pragma: NO COVER + else: raise ValueError('Unknown entry type: %s' % (entry_type,)) entries.append(info) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 679ee2cb9640..a155ce693fa9 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -318,6 +318,15 @@ def test_log_proto(self): self.assertEqual(len(connection._requested), 0) self.assertEqual(batch.entries, [('proto', message)]) + def test_commit_w_invalid_entry_type(self): + logger = _Logger() + conn = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=conn) + batch = self._makeOne(logger, CLIENT) + batch.entries.append(('bogus', 'BOGUS')) + with self.assertRaises(ValueError): + batch.commit() + def test_commit_w_bound_client(self): import json from google.protobuf.json_format import MessageToJson From 44d8e863ffd56434d9e414774aaab0552258bf03 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 12:12:37 -0400 Subject: [PATCH 53/68] Clean up docstring copy-pasta. --- gcloud/logging/_helpers.py | 2 +- gcloud/logging/client.py | 12 ++++++------ gcloud/logging/logger.py | 14 +++++++------- gcloud/logging/metric.py | 2 +- gcloud/logging/sink.py | 2 +- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/gcloud/logging/_helpers.py b/gcloud/logging/_helpers.py index aadd2aacdc52..8061abc2cd30 100644 --- a/gcloud/logging/_helpers.py +++ b/gcloud/logging/_helpers.py @@ -38,7 +38,7 @@ def logger_name_from_path(path, project): included for validation purposes. :rtype: string - :returns: Topic name parsed from ``path``. + :returns: Logger name parsed from ``path``. :raises: :class:`ValueError` if the ``path`` is ill-formed or if the project from the ``path`` does not agree with the ``project`` passed in. diff --git a/gcloud/logging/client.py b/gcloud/logging/client.py index afb7496db8a6..fd79d3100aa3 100644 --- a/gcloud/logging/client.py +++ b/gcloud/logging/client.py @@ -103,18 +103,18 @@ def list_entries(self, projects=None, filter_=None, order_by=None, :data:`gcloud.logging.DESCENDING`. :type page_size: int - :param page_size: maximum number of topics to return, If not passed, + :param page_size: maximum number of entries to return, If not passed, defaults to a value set by the API. :type page_token: string - :param page_token: opaque marker for the next "page" of topics. If not + :param page_token: opaque marker for the next "page" of entries. If not passed, the API will return the first page of - topics. + entries. 
:rtype: tuple, (list, str) :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a "next page token" string: if not None, indicates that - more topics can be retrieved with another call (pass that + more entries can be retrieved with another call (pass that value as ``page_token``). """ if projects is None: @@ -155,7 +155,7 @@ def sink(self, name, filter_, destination): :param destination: destination URI for the entries exported by the sink. - :rtype: :class:`gcloud.pubsub.sink.Sink` + :rtype: :class:`gcloud.logging.sink.Sink` :returns: Sink created with the current client. """ return Sink(name, filter_, destination, client=self) @@ -209,7 +209,7 @@ def metric(self, name, filter_, description=''): :type description: string :param description: the description of the metric to be constructed. - :rtype: :class:`gcloud.pubsub.metric.Metric` + :rtype: :class:`gcloud.logging.metric.Metric` :returns: Metric created with the current client. """ return Metric(name, filter_, client=self, description=description) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 26323f328811..2773c91f67d7 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -56,7 +56,7 @@ def _require_client(self, client): :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. + ``client`` stored on the current logger. :rtype: :class:`gcloud.logging.client.Client` :returns: The client passed in or the currently bound client. @@ -152,9 +152,9 @@ def delete(self, client=None): """API call: delete all entries in a logger via a DELETE request See: - https://cloud.google.com/pubsub/reference/rest/v1/projects.topics/delete + https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs/delete - :type client: :class:`gcloud.pubsub.client.Client` or ``NoneType`` + :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. """ @@ -182,18 +182,18 @@ def list_entries(self, projects=None, filter_=None, order_by=None, :data:`gcloud.logging.DESCENDING`. :type page_size: int - :param page_size: maximum number of topics to return, If not passed, + :param page_size: maximum number of entries to return, If not passed, defaults to a value set by the API. :type page_token: string - :param page_token: opaque marker for the next "page" of topics. If not + :param page_token: opaque marker for the next "page" of entries. If not passed, the API will return the first page of - topics. + entries. :rtype: tuple, (list, str) :returns: list of :class:`gcloud.logging.entry.TextEntry`, plus a "next page token" string: if not None, indicates that - more topics can be retrieved with another call (pass that + more entries can be retrieved with another call (pass that value as ``page_token``). """ log_filter = 'logName:%s' % (self.name,) diff --git a/gcloud/logging/metric.py b/gcloud/logging/metric.py index 38b7e3ad3d04..34fa343ff53f 100644 --- a/gcloud/logging/metric.py +++ b/gcloud/logging/metric.py @@ -100,7 +100,7 @@ def from_api_repr(cls, resource, client): :type resource: dict :param resource: metric resource representation returned from the API - :type client: :class:`gcloud.pubsub.client.Client` + :type client: :class:`gcloud.logging.client.Client` :param client: Client which holds credentials and project configuration for the metric. 
diff --git a/gcloud/logging/sink.py b/gcloud/logging/sink.py index d7fb24fa98c9..49f651bfe905 100644 --- a/gcloud/logging/sink.py +++ b/gcloud/logging/sink.py @@ -97,7 +97,7 @@ def from_api_repr(cls, resource, client): :type resource: dict :param resource: sink resource representation returned from the API - :type client: :class:`gcloud.pubsub.client.Client` + :type client: :class:`gcloud.logging.client.Client` :param client: Client which holds credentials and project configuration for the sink. From dfd815abc83fdb301613fc2254be7b5fcd546984 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 13:27:36 -0400 Subject: [PATCH 54/68] Track labels set on an entry by the backend. --- gcloud/logging/entries.py | 10 ++++++++-- gcloud/logging/test_entries.py | 11 ++++++++++- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/gcloud/logging/entries.py b/gcloud/logging/entries.py index d94d7d984a1a..ca1cf62f5db4 100644 --- a/gcloud/logging/entries.py +++ b/gcloud/logging/entries.py @@ -37,12 +37,17 @@ class _BaseEntry(object): :type timestamp: :class:`datetime.datetime`, or :class:`NoneType` :param timestamp: (optional) timestamp for the entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry """ - def __init__(self, payload, logger, insert_id=None, timestamp=None): + def __init__(self, payload, logger, + insert_id=None, timestamp=None, labels=None): self.payload = payload self.logger = logger self.insert_id = insert_id self.timestamp = timestamp + self.labels = labels @classmethod def from_api_repr(cls, resource, client, loggers=None): @@ -76,7 +81,8 @@ def from_api_repr(cls, resource, client, loggers=None): timestamp = resource.get('timestamp') if timestamp is not None: timestamp = _rfc3339_nanos_to_datetime(timestamp) - return cls(payload, logger, insert_id, timestamp) + labels = resource.get('labels') + return cls(payload, logger, insert_id, timestamp, labels) class TextEntry(_BaseEntry): diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py index 4505c7655ff6..2da275d71ea0 100644 --- a/gcloud/logging/test_entries.py +++ b/gcloud/logging/test_entries.py @@ -39,18 +39,21 @@ def test_ctor_defaults(self): self.assertTrue(entry.logger is logger) self.assertTrue(entry.insert_id is None) self.assertTrue(entry.timestamp is None) + self.assertTrue(entry.labels is None) def test_ctor_explicit(self): import datetime PAYLOAD = 'PAYLOAD' IID = 'IID' TIMESTAMP = datetime.datetime.now() + LABELS = {'foo': 'bar', 'baz': 'qux'} logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._makeOne(PAYLOAD, logger, IID, TIMESTAMP) + entry = self._makeOne(PAYLOAD, logger, IID, TIMESTAMP, LABELS) self.assertEqual(entry.payload, PAYLOAD) self.assertTrue(entry.logger is logger) self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, TIMESTAMP) + self.assertEqual(entry.labels, LABELS) def test_from_api_repr_missing_data_no_loggers(self): client = _Client(self.PROJECT) @@ -79,11 +82,13 @@ def test_from_api_repr_w_loggers_no_logger_match(self): NOW = datetime.utcnow().replace(tzinfo=UTC) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + LABELS = {'foo': 'bar', 'baz': 'qux'} API_REPR = { 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, + 'labels': LABELS, } loggers = {} klass = self._getTargetClass() @@ -91,6 +96,7 @@ def test_from_api_repr_w_loggers_no_logger_match(self): 
self.assertEqual(entry.payload, PAYLOAD) self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.labels, LABELS) logger = entry.logger self.assertTrue(isinstance(logger, _Logger)) self.assertTrue(logger.client is client) @@ -106,11 +112,13 @@ def test_from_api_repr_w_loggers_w_logger_match(self): NOW = datetime.utcnow().replace(tzinfo=UTC) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) + LABELS = {'foo': 'bar', 'baz': 'qux'} API_REPR = { 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, + 'labels': LABELS, } LOGGER = object() loggers = {LOG_NAME: LOGGER} @@ -119,6 +127,7 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertEqual(entry.payload, PAYLOAD) self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.labels, LABELS) self.assertTrue(entry.logger is LOGGER) From 2c4c5b331589866b62c93f91b014e92f333d8537 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 13:30:55 -0400 Subject: [PATCH 55/68] Allow setting default labels on a logger instance. --- gcloud/logging/logger.py | 7 ++++++- gcloud/logging/test_logger.py | 15 ++++++++++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 59c77d49f5e9..30521a9711a0 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -31,10 +31,15 @@ class Logger(object): :type client: :class:`gcloud.logging.client.Client` :param client: A client which holds credentials and project configuration for the logger (which requires a project). + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of default labels for entries written + via this logger. """ - def __init__(self, name, client): + def __init__(self, name, client, labels=None): self.name = name self._client = client + self.labels = labels @property def client(self): diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index a155ce693fa9..bfed47cb8ea1 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -27,7 +27,7 @@ def _getTargetClass(self): def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) - def test_ctor(self): + def test_ctor_defaults(self): conn = _Connection() client = _Client(self.PROJECT, conn) logger = self._makeOne(self.LOGGER_NAME, client=client) @@ -36,6 +36,19 @@ def test_ctor(self): self.assertEqual(logger.project, self.PROJECT) self.assertEqual(logger.full_name, 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.labels, None) + + def test_ctor_explicit(self): + LABELS = {'foo': 'bar', 'baz': 'qux'} + conn = _Connection() + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, labels=LABELS) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertTrue(logger.client is client) + self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(logger.full_name, 'projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.labels, LABELS) def test_batch_w_bound_client(self): from gcloud.logging.logger import Batch From 64e2f1cf9094871dce158dc61476210540213f7c Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 13:49:13 -0400 Subject: [PATCH 56/68] Add support for logging entries with labels. 
Labels can be passed in explicitly to the 'log_text', 'log_struct', or 'log_proto' methods. If not passed, any default values configured on the logger instance will be used. See: #1566. --- gcloud/logging/logger.py | 46 ++++++++++++++- gcloud/logging/test_logger.py | 108 +++++++++++++++++++++++++++++++--- 2 files changed, 142 insertions(+), 12 deletions(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 30521a9711a0..2d76c699567c 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -83,7 +83,23 @@ def batch(self, client=None): client = self._require_client(client) return Batch(self, client) - def log_text(self, text, client=None): + def _get_labels(self, labels): + """Return effective labels. + + Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. + + :type labels: dict or :class:`NoneType` + :param labels: labels passed in to calling method. + + :rtype: dict or :class:`NoneType`. + :returns: the passed-in labels, if not none, else any default labels + configured on the logger instance. + """ + if labels is not None: + return labels + return self.labels + + def log_text(self, text, client=None, labels=None): """API call: log a text message via a POST request See: @@ -95,6 +111,9 @@ def log_text(self, text, client=None): :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. """ client = self._require_client(client) @@ -107,10 +126,15 @@ def log_text(self, text, client=None): }, }], } + + labels = self._get_labels(labels) + if labels is not None: + data['entries'][0]['labels'] = labels + client.connection.api_request( method='POST', path='/entries:write', data=data) - def log_struct(self, info, client=None): + def log_struct(self, info, client=None, labels=None): """API call: log a structured message via a POST request See: @@ -122,6 +146,9 @@ def log_struct(self, info, client=None): :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. """ client = self._require_client(client) @@ -134,10 +161,15 @@ def log_struct(self, info, client=None): }, }], } + + labels = self._get_labels(labels) + if labels is not None: + data['entries'][0]['labels'] = labels + client.connection.api_request( method='POST', path='/entries:write', data=data) - def log_proto(self, message, client=None): + def log_proto(self, message, client=None, labels=None): """API call: log a protobuf message via a POST request See: @@ -149,6 +181,9 @@ def log_proto(self, message, client=None): :type client: :class:`gcloud.logging.client.Client` or ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current logger. + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. 
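# A minimal sketch of the label fallback described in this patch, assuming a
# client configured as in the usage docs; the label keys and values are
# illustrative only.
from gcloud import logging
from gcloud.logging.logger import Logger

client = logging.Client()
logger = Logger('log_name', client, labels={'env': 'prod'})  # instance defaults
logger.log_text('hello world', labels={'color': 'red'})      # explicit labels are sent as-is
logger.log_struct({'message': 'hello world'})                # no labels passed: {'env': 'prod'} is used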
""" client = self._require_client(client) as_json_str = MessageToJson(message) @@ -163,6 +198,11 @@ def log_proto(self, message, client=None): }, }], } + + labels = self._get_labels(labels) + if labels is not None: + data['entries'][0]['labels'] = labels + client.connection.api_request( method='POST', path='/entries:write', data=data) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index bfed47cb8ea1..7849c9d43013 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -94,13 +94,41 @@ def test_log_text_w_str_implicit_client(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) - def test_log_text_w_unicode_explicit_client(self): + def test_log_text_w_default_labels(self): + TEXT = 'TEXT' + DEFAULT_LABELS = {'foo': 'spam'} + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + logger.log_text(TEXT) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'textPayload': TEXT, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_text_w_unicode_explicit_client_and_labels(self): TEXT = u'TEXT' + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} conn = _Connection({}) client1 = _Client(self.PROJECT, object()) client2 = _Client(self.PROJECT, conn) - logger = self._makeOne(self.LOGGER_NAME, client=client1) - logger.log_text(TEXT, client=client2) + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + logger.log_text(TEXT, client=client2, labels=LABELS) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] SENT = { @@ -111,6 +139,7 @@ def test_log_text_w_unicode_explicit_client(self): 'resource': { 'type': 'global', }, + 'labels': LABELS, }], } self.assertEqual(req['method'], 'POST') @@ -139,13 +168,41 @@ def test_log_struct_w_implicit_client(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) - def test_log_struct_w_explicit_client(self): + def test_log_struct_w_default_labels(self): STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + DEFAULT_LABELS = {'foo': 'spam'} + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + logger.log_struct(STRUCT) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'jsonPayload': STRUCT, + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_struct_w_explicit_client_and_labels(self): + STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} conn = _Connection({}) client1 = _Client(self.PROJECT, object()) client2 = _Client(self.PROJECT, conn) - logger = self._makeOne(self.LOGGER_NAME, client=client1) - logger.log_struct(STRUCT, client=client2) + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + 
logger.log_struct(STRUCT, client=client2, labels=LABELS) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] SENT = { @@ -156,6 +213,7 @@ def test_log_struct_w_explicit_client(self): 'resource': { 'type': 'global', }, + 'labels': LABELS, }], } self.assertEqual(req['method'], 'POST') @@ -187,16 +245,47 @@ def test_log_proto_w_implicit_client(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) - def test_log_proto_w_explicit_client(self): + def test_log_proto_w_default_labels(self): + import json + from google.protobuf.json_format import MessageToJson + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + conn = _Connection({}) + client = _Client(self.PROJECT, conn) + logger = self._makeOne(self.LOGGER_NAME, client=client, + labels=DEFAULT_LABELS) + logger.log_proto(message) + self.assertEqual(len(conn._requested), 1) + req = conn._requested[0] + SENT = { + 'entries': [{ + 'logName': 'projects/%s/logs/%s' % ( + self.PROJECT, self.LOGGER_NAME), + 'protoPayload': json.loads(MessageToJson(message)), + 'resource': { + 'type': 'global', + }, + 'labels': DEFAULT_LABELS, + }], + } + self.assertEqual(req['method'], 'POST') + self.assertEqual(req['path'], '/entries:write') + self.assertEqual(req['data'], SENT) + + def test_log_proto_w_explicit_client_and_labels(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} conn = _Connection({}) client1 = _Client(self.PROJECT, object()) client2 = _Client(self.PROJECT, conn) - logger = self._makeOne(self.LOGGER_NAME, client=client1) - logger.log_proto(message, client=client2) + logger = self._makeOne(self.LOGGER_NAME, client=client1, + labels=DEFAULT_LABELS) + logger.log_proto(message, client=client2, labels=LABELS) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] SENT = { @@ -207,6 +296,7 @@ def test_log_proto_w_explicit_client(self): 'resource': { 'type': 'global', }, + 'labels': LABELS, }], } self.assertEqual(req['method'], 'POST') From 53df5fca4c68f59eb412a2e5890a0a4cf9bd8307 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 14:27:23 -0400 Subject: [PATCH 57/68] Add 'Logger.path' property. It was needed earlier for batch support, but was masked by the attribute on the mock '_Logger' used by the batch tests. --- gcloud/logging/logger.py | 8 ++++++-- gcloud/logging/test_logger.py | 4 ++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 2d76c699567c..4477df260572 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -56,6 +56,11 @@ def full_name(self): """Fully-qualified name used in logging APIs""" return 'projects/%s/logs/%s' % (self.project, self.name) + @property + def path(self): + """URI path for use in logging APIs""" + return '/%s' % (self.full_name,) + def _require_client(self, client): """Check client or verify over-ride. @@ -217,8 +222,7 @@ def delete(self, client=None): ``client`` stored on the current logger. 
""" client = self._require_client(client) - client.connection.api_request( - method='DELETE', path='/%s' % self.full_name) + client.connection.api_request(method='DELETE', path=self.path) def list_entries(self, projects=None, filter_=None, order_by=None, page_size=None, page_token=None): diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 7849c9d43013..9a5a698c27ea 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -36,6 +36,8 @@ def test_ctor_defaults(self): self.assertEqual(logger.project, self.PROJECT) self.assertEqual(logger.full_name, 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.path, '/projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) self.assertEqual(logger.labels, None) def test_ctor_explicit(self): @@ -48,6 +50,8 @@ def test_ctor_explicit(self): self.assertEqual(logger.project, self.PROJECT) self.assertEqual(logger.full_name, 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME)) + self.assertEqual(logger.path, '/projects/%s/logs/%s' + % (self.PROJECT, self.LOGGER_NAME)) self.assertEqual(logger.labels, LABELS) def test_batch_w_bound_client(self): From 7aa9629c42aa00dd5794d4e2d6b0260b7d0f858e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 14:27:55 -0400 Subject: [PATCH 58/68] Add label support to the newly-added batch feature. --- gcloud/logging/logger.py | 29 +++++++++---- gcloud/logging/test_logger.py | 79 ++++++++++++++++++++++++++++------- 2 files changed, 86 insertions(+), 22 deletions(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 4477df260572..5f356b996e0c 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -291,29 +291,38 @@ def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is None: self.commit() - def log_text(self, text): + def log_text(self, text, labels=None): """Add a text entry to be logged during :meth:`commit`. :type text: string :param text: the text entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. """ - self.entries.append(('text', text)) + self.entries.append(('text', text, labels)) - def log_struct(self, info): + def log_struct(self, info, labels=None): """Add a struct entry to be logged during :meth:`commit`. :type info: dict :param info: the struct entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. """ - self.entries.append(('struct', info)) + self.entries.append(('struct', info, labels)) - def log_proto(self, message): + def log_proto(self, message, labels=None): """Add a protobuf entry to be logged during :meth:`commit`. :type message: protobuf message :param message: the protobuf entry + + :type labels: dict or :class:`NoneType` + :param labels: (optional) mapping of labels for the entry. """ - self.entries.append(('proto', message)) + self.entries.append(('proto', message, labels)) def commit(self, client=None): """Send saved log entries as a single API call. 
@@ -324,12 +333,16 @@ def commit(self, client=None): """ if client is None: client = self.client + data = { 'logName': self.logger.path, 'resource': {'type': 'global'}, } + if self.logger.labels is not None: + data['labels'] = self.logger.labels + entries = data['entries'] = [] - for entry_type, entry in self.entries: + for entry_type, entry, labels in self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': @@ -340,6 +353,8 @@ def commit(self, client=None): info = {'protoPayload': as_json} else: raise ValueError('Unknown entry type: %s' % (entry_type,)) + if labels is not None: + info['labels'] = labels entries.append(info) client.connection.api_request( diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 9a5a698c27ea..9149fabfddc4 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -394,7 +394,7 @@ def test_ctor_defaults(self): self.assertTrue(batch.client is CLIENT) self.assertEqual(len(batch.entries), 0) - def test_log_text(self): + def test_log_text_defaults(self): TEXT = 'This is the entry text' connection = _Connection() CLIENT = _Client(project=self.PROJECT, connection=connection) @@ -402,9 +402,20 @@ def test_log_text(self): batch = self._makeOne(logger, client=CLIENT) batch.log_text(TEXT) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('text', TEXT)]) + self.assertEqual(batch.entries, [('text', TEXT, None)]) - def test_log_struct(self): + def test_log_text_explicit(self): + TEXT = 'This is the entry text' + LABELS = {'foo': 'bar', 'baz': 'qux'} + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_text(TEXT, labels=LABELS) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, [('text', TEXT, LABELS)]) + + def test_log_struct_defaults(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} connection = _Connection() CLIENT = _Client(project=self.PROJECT, connection=connection) @@ -412,9 +423,20 @@ def test_log_struct(self): batch = self._makeOne(logger, client=CLIENT) batch.log_struct(STRUCT) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('struct', STRUCT)]) + self.assertEqual(batch.entries, [('struct', STRUCT, None)]) - def test_log_proto(self): + def test_log_struct_explicit(self): + STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} + LABELS = {'foo': 'bar', 'baz': 'qux'} + connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_struct(STRUCT, labels=LABELS) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, [('struct', STRUCT, LABELS)]) + + def test_log_proto_defaults(self): from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) connection = _Connection() @@ -423,14 +445,26 @@ def test_log_proto(self): batch = self._makeOne(logger, client=CLIENT) batch.log_proto(message) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('proto', message)]) + self.assertEqual(batch.entries, [('proto', message, None)]) + + def test_log_proto_explicit(self): + from google.protobuf.struct_pb2 import Struct, Value + message = Struct(fields={'foo': Value(bool_value=True)}) + LABELS = {'foo': 'bar', 'baz': 'qux'} + 
connection = _Connection() + CLIENT = _Client(project=self.PROJECT, connection=connection) + logger = _Logger() + batch = self._makeOne(logger, client=CLIENT) + batch.log_proto(message, labels=LABELS) + self.assertEqual(len(connection._requested), 0) + self.assertEqual(batch.entries, [('proto', message, LABELS)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() conn = _Connection() CLIENT = _Client(project=self.PROJECT, connection=conn) batch = self._makeOne(logger, CLIENT) - batch.entries.append(('bogus', 'BOGUS')) + batch.entries.append(('bogus', 'BOGUS', None)) with self.assertRaises(ValueError): batch.commit() @@ -471,25 +505,29 @@ def test_commit_w_alternate_client(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + from gcloud.logging.logger import Logger TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} conn1 = _Connection() conn2 = _Connection({}) CLIENT1 = _Client(project=self.PROJECT, connection=conn1) CLIENT2 = _Client(project=self.PROJECT, connection=conn2) - logger = _Logger() + logger = Logger('logger_name', CLIENT1, labels=DEFAULT_LABELS) SENT = { 'logName': logger.path, 'resource': {'type': 'global'}, + 'labels': DEFAULT_LABELS, 'entries': [ - {'textPayload': TEXT}, + {'textPayload': TEXT, 'labels': LABELS}, {'structPayload': STRUCT}, {'protoPayload': json.loads(MessageToJson(message))}, ], } batch = self._makeOne(logger, client=CLIENT1) - batch.log_text(TEXT) + batch.log_text(TEXT, labels=LABELS) batch.log_struct(STRUCT) batch.log_proto(message) batch.commit(client=CLIENT2) @@ -505,20 +543,24 @@ def test_context_mgr_success(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + from gcloud.logging.logger import Logger TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) + DEFAULT_LABELS = {'foo': 'spam'} + LABELS = {'foo': 'bar', 'baz': 'qux'} conn = _Connection({}) CLIENT = _Client(project=self.PROJECT, connection=conn) - logger = _Logger() + logger = Logger('logger_name', CLIENT, labels=DEFAULT_LABELS) SENT = { 'logName': logger.path, 'resource': { 'type': 'global', }, + 'labels': DEFAULT_LABELS, 'entries': [ {'textPayload': TEXT}, - {'structPayload': STRUCT}, + {'structPayload': STRUCT, 'labels': LABELS}, {'protoPayload': json.loads(MessageToJson(message))}, ], } @@ -526,7 +568,7 @@ def test_context_mgr_success(self): with batch as other: other.log_text(TEXT) - other.log_struct(STRUCT) + other.log_struct(STRUCT, labels=LABELS) other.log_proto(message) self.assertEqual(list(batch.entries), []) @@ -540,18 +582,23 @@ def test_context_mgr_failure(self): from google.protobuf.struct_pb2 import Struct, Value TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} + LABELS = {'foo': 'bar', 'baz': 'qux'} message = Struct(fields={'foo': Value(bool_value=True)}) conn = _Connection({}) CLIENT = _Client(project=self.PROJECT, connection=conn) logger = _Logger() - UNSENT = [('text', TEXT), ('struct', STRUCT), ('proto', message)] + UNSENT = [ + ('text', TEXT, None), + ('struct', STRUCT, None), + ('proto', message, LABELS), + ] batch = self._makeOne(logger, client=CLIENT) try: with batch as other: other.log_text(TEXT) 
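# A minimal sketch of the batched pattern exercised by these tests, assuming a
# client configured as in the usage docs; payload and label values are
# illustrative only.
from gcloud import logging

client = logging.Client()
logger = client.logger('log_name')
with logger.batch() as batch:
    batch.log_text('backup started', labels={'phase': 'start'})
    batch.log_struct({'message': 'backup finished', 'files': 42})
# Leaving the block commits both entries in a single 'entries:write' request.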
other.log_struct(STRUCT) - other.log_proto(message) + other.log_proto(message, labels=LABELS) raise _Bugout() except _Bugout: pass @@ -562,6 +609,8 @@ def test_context_mgr_failure(self): class _Logger(object): + labels = None + def __init__(self, name="NAME", project="PROJECT"): self.path = '/projects/%s/logs/%s' % (project, name) From 43a09369919ba4f366c468e18a3a96d03e2d17b4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 14:45:11 -0400 Subject: [PATCH 59/68] Fix incorrect 'structPayload' entry in 'Batch.commit'. --- gcloud/logging/logger.py | 2 +- gcloud/logging/test_logger.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index 5f356b996e0c..fea76e862d3e 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -346,7 +346,7 @@ def commit(self, client=None): if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': - info = {'structPayload': entry} + info = {'jsonPayload': entry} elif entry_type == 'proto': as_json_str = MessageToJson(entry) as_json = json.loads(as_json_str) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index 9149fabfddc4..ad698de504f9 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -485,7 +485,7 @@ def test_commit_w_bound_client(self): }, 'entries': [ {'textPayload': TEXT}, - {'structPayload': STRUCT}, + {'jsonPayload': STRUCT}, {'protoPayload': json.loads(MessageToJson(message))}, ], } @@ -522,7 +522,7 @@ def test_commit_w_alternate_client(self): 'labels': DEFAULT_LABELS, 'entries': [ {'textPayload': TEXT, 'labels': LABELS}, - {'structPayload': STRUCT}, + {'jsonPayload': STRUCT}, {'protoPayload': json.loads(MessageToJson(message))}, ], } @@ -560,7 +560,7 @@ def test_context_mgr_success(self): 'labels': DEFAULT_LABELS, 'entries': [ {'textPayload': TEXT}, - {'structPayload': STRUCT, 'labels': LABELS}, + {'jsonPayload': STRUCT, 'labels': LABELS}, {'protoPayload': json.loads(MessageToJson(message))}, ], } From 5cbeaca0514b55442e7ff32789d0b511e9ffe21d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 14:46:13 -0400 Subject: [PATCH 60/68] Factor out construction of log entry resource into a helper. Addresses: https://github.com/GoogleCloudPlatform/gcloud-python/pull/1668#discussion_r57606003 --- gcloud/logging/logger.py | 94 ++++++++++++++++++---------------------- 1 file changed, 43 insertions(+), 51 deletions(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index fea76e862d3e..f9dc41ff6459 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -88,21 +88,49 @@ def batch(self, client=None): client = self._require_client(client) return Batch(self, client) - def _get_labels(self, labels): - """Return effective labels. + def _make_entry_resource(self, text=None, info=None, message=None, + labels=None): + """Return a log entry resource of the appropriate type. Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. + Only one of ``text``, ``info``, or ``message`` should be passed. + + :type text: string or :class:`NoneType` + :param text: text payload + + :type info: dict or :class:`NoneType` + :param info: struct payload + + :type message: Protobuf message or :class:`NoneType` + :param message: protobuf payload + :type labels: dict or :class:`NoneType` :param labels: labels passed in to calling method. - - :rtype: dict or :class:`NoneType`. 
- :returns: the passed-in labels, if not none, else any default labels - configured on the logger instance. """ + resource = { + 'logName': self.full_name, + 'resource': {'type': 'global'}, + } + + if text is not None: + resource['textPayload'] = text + + if info is not None: + resource['jsonPayload'] = info + + if message is not None: + as_json_str = MessageToJson(message) + as_json = json.loads(as_json_str) + resource['protoPayload'] = as_json + + if labels is None: + labels = self.labels + if labels is not None: - return labels - return self.labels + resource['labels'] = labels + + return resource def log_text(self, text, client=None, labels=None): """API call: log a text message via a POST request @@ -121,20 +149,9 @@ def log_text(self, text, client=None, labels=None): :param labels: (optional) mapping of labels for the entry. """ client = self._require_client(client) + entry_resource = self._make_entry_resource(text=text, labels=labels) - data = { - 'entries': [{ - 'logName': self.full_name, - 'textPayload': text, - 'resource': { - 'type': 'global', - }, - }], - } - - labels = self._get_labels(labels) - if labels is not None: - data['entries'][0]['labels'] = labels + data = {'entries': [entry_resource]} client.connection.api_request( method='POST', path='/entries:write', data=data) @@ -156,20 +173,8 @@ def log_struct(self, info, client=None, labels=None): :param labels: (optional) mapping of labels for the entry. """ client = self._require_client(client) - - data = { - 'entries': [{ - 'logName': self.full_name, - 'jsonPayload': info, - 'resource': { - 'type': 'global', - }, - }], - } - - labels = self._get_labels(labels) - if labels is not None: - data['entries'][0]['labels'] = labels + entry_resource = self._make_entry_resource(info=info, labels=labels) + data = {'entries': [entry_resource]} client.connection.api_request( method='POST', path='/entries:write', data=data) @@ -191,22 +196,9 @@ def log_proto(self, message, client=None, labels=None): :param labels: (optional) mapping of labels for the entry. """ client = self._require_client(client) - as_json_str = MessageToJson(message) - as_json = json.loads(as_json_str) - - data = { - 'entries': [{ - 'logName': self.full_name, - 'protoPayload': as_json, - 'resource': { - 'type': 'global', - }, - }], - } - - labels = self._get_labels(labels) - if labels is not None: - data['entries'][0]['labels'] = labels + entry_resource = self._make_entry_resource( + message=message, labels=labels) + data = {'entries': [entry_resource]} client.connection.api_request( method='POST', path='/entries:write', data=data) From 36a9520f28819a693bef829a48b664da95806ed7 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 16:03:26 -0400 Subject: [PATCH 61/68] Add 'severity'/'http_request' attrs to '_BaseEntry'. Parse from API resource if found. --- gcloud/logging/entries.py | 18 ++++++++++++--- gcloud/logging/test_entries.py | 40 ++++++++++++++++++++++++++++++++-- 2 files changed, 53 insertions(+), 5 deletions(-) diff --git a/gcloud/logging/entries.py b/gcloud/logging/entries.py index ca1cf62f5db4..e26ac7ef0cd3 100644 --- a/gcloud/logging/entries.py +++ b/gcloud/logging/entries.py @@ -40,14 +40,23 @@ class _BaseEntry(object): :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry """ - def __init__(self, payload, logger, - insert_id=None, timestamp=None, labels=None): + def __init__(self, payload, logger, insert_id=None, timestamp=None, + labels=None, severity=None, http_request=None): self.payload = payload self.logger = logger self.insert_id = insert_id self.timestamp = timestamp self.labels = labels + self.severity = severity + self.http_request = http_request @classmethod def from_api_repr(cls, resource, client, loggers=None): @@ -82,7 +91,10 @@ def from_api_repr(cls, resource, client, loggers=None): if timestamp is not None: timestamp = _rfc3339_nanos_to_datetime(timestamp) labels = resource.get('labels') - return cls(payload, logger, insert_id, timestamp, labels) + severity = resource.get('severity') + http_request = resource.get('httpRequest') + return cls(payload, logger, insert_id=insert_id, timestamp=timestamp, + labels=labels, severity=severity, http_request=http_request) class TextEntry(_BaseEntry): diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py index 2da275d71ea0..cb78d51a2e18 100644 --- a/gcloud/logging/test_entries.py +++ b/gcloud/logging/test_entries.py @@ -40,6 +40,8 @@ def test_ctor_defaults(self): self.assertTrue(entry.insert_id is None) self.assertTrue(entry.timestamp is None) self.assertTrue(entry.labels is None) + self.assertTrue(entry.severity is None) + self.assertTrue(entry.http_request is None) def test_ctor_explicit(self): import datetime @@ -47,13 +49,31 @@ def test_ctor_explicit(self): IID = 'IID' TIMESTAMP = datetime.datetime.now() LABELS = {'foo': 'bar', 'baz': 'qux'} + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' logger = _Logger(self.LOGGER_NAME, self.PROJECT) - entry = self._makeOne(PAYLOAD, logger, IID, TIMESTAMP, LABELS) + entry = self._makeOne(PAYLOAD, logger, + insert_id=IID, + timestamp=TIMESTAMP, + labels=LABELS, + severity=SEVERITY, + http_request={ + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + }, + ) self.assertEqual(entry.payload, PAYLOAD) self.assertTrue(entry.logger is logger) self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, TIMESTAMP) self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.severity, SEVERITY) + self.assertEqual(entry.http_request['requestMethod'], METHOD) + self.assertEqual(entry.http_request['requestUrl'], URI) + self.assertEqual(entry.http_request['status'], STATUS) def test_from_api_repr_missing_data_no_loggers(self): client = _Client(self.PROJECT) @@ -68,6 +88,8 @@ def test_from_api_repr_missing_data_no_loggers(self): self.assertEqual(entry.payload, PAYLOAD) self.assertTrue(entry.insert_id is None) self.assertTrue(entry.timestamp is None) + self.assertTrue(entry.severity is None) + self.assertTrue(entry.http_request is None) logger = entry.logger self.assertTrue(isinstance(logger, _Logger)) self.assertTrue(logger.client is client) @@ -76,27 +98,41 @@ def test_from_api_repr_missing_data_no_loggers(self): def test_from_api_repr_w_loggers_no_logger_match(self): from datetime import datetime from gcloud._helpers import UTC + klass = self._getTargetClass() client = _Client(self.PROJECT) PAYLOAD = 'PAYLOAD' + SEVERITY = 'CRITICAL' IID = 'IID' NOW = datetime.utcnow().replace(tzinfo=UTC) TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) LOG_NAME = 'projects/%s/logs/%s' % (self.PROJECT, self.LOGGER_NAME) LABELS = {'foo': 'bar', 'baz': 
'qux'} + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' API_REPR = { 'dummyPayload': PAYLOAD, 'logName': LOG_NAME, 'insertId': IID, 'timestamp': TIMESTAMP, 'labels': LABELS, + 'severity': SEVERITY, + 'httpRequest': { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + }, } loggers = {} - klass = self._getTargetClass() entry = klass.from_api_repr(API_REPR, client, loggers=loggers) self.assertEqual(entry.payload, PAYLOAD) self.assertEqual(entry.insert_id, IID) self.assertEqual(entry.timestamp, NOW) self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.severity, SEVERITY) + self.assertEqual(entry.http_request['requestMethod'], METHOD) + self.assertEqual(entry.http_request['requestUrl'], URI) + self.assertEqual(entry.http_request['status'], STATUS) logger = entry.logger self.assertTrue(isinstance(logger, _Logger)) self.assertTrue(logger.client is client) From 145cbfbdce788e1b81883b97955919974b4057ea Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 28 Mar 2016 16:36:16 -0400 Subject: [PATCH 62/68] Add support for logging w/ per-request metadata. - insert_id - severity - http_request See: #1566. --- gcloud/logging/logger.py | 129 +++++++++++++++++++--- gcloud/logging/test_entries.py | 12 +-- gcloud/logging/test_logger.py | 189 ++++++++++++++++++++++++++------- 3 files changed, 273 insertions(+), 57 deletions(-) diff --git a/gcloud/logging/logger.py b/gcloud/logging/logger.py index f9dc41ff6459..f7bb50ee4a80 100644 --- a/gcloud/logging/logger.py +++ b/gcloud/logging/logger.py @@ -89,7 +89,8 @@ def batch(self, client=None): return Batch(self, client) def _make_entry_resource(self, text=None, info=None, message=None, - labels=None): + labels=None, insert_id=None, severity=None, + http_request=None): """Return a log entry resource of the appropriate type. Helper for :meth:`log_text`, :meth:`log_struct`, and :meth:`log_proto`. @@ -107,6 +108,16 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type labels: dict or :class:`NoneType` :param labels: labels passed in to calling method. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry """ resource = { 'logName': self.full_name, @@ -130,9 +141,19 @@ def _make_entry_resource(self, text=None, info=None, message=None, if labels is not None: resource['labels'] = labels + if insert_id is not None: + resource['insertId'] = insert_id + + if severity is not None: + resource['severity'] = severity + + if http_request is not None: + resource['httpRequest'] = http_request + return resource - def log_text(self, text, client=None, labels=None): + def log_text(self, text, client=None, labels=None, insert_id=None, + severity=None, http_request=None): """API call: log a text message via a POST request See: @@ -147,16 +168,28 @@ def log_text(self, text, client=None, labels=None): :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. 
+ + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry """ client = self._require_client(client) - entry_resource = self._make_entry_resource(text=text, labels=labels) - + entry_resource = self._make_entry_resource( + text=text, labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request) data = {'entries': [entry_resource]} client.connection.api_request( method='POST', path='/entries:write', data=data) - def log_struct(self, info, client=None, labels=None): + def log_struct(self, info, client=None, labels=None, insert_id=None, + severity=None, http_request=None): """API call: log a structured message via a POST request See: @@ -171,15 +204,28 @@ def log_struct(self, info, client=None, labels=None): :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry """ client = self._require_client(client) - entry_resource = self._make_entry_resource(info=info, labels=labels) + entry_resource = self._make_entry_resource( + info=info, labels=labels, insert_id=insert_id, severity=severity, + http_request=http_request) data = {'entries': [entry_resource]} client.connection.api_request( method='POST', path='/entries:write', data=data) - def log_proto(self, message, client=None, labels=None): + def log_proto(self, message, client=None, labels=None, insert_id=None, + severity=None, http_request=None): """API call: log a protobuf message via a POST request See: @@ -194,10 +240,21 @@ def log_proto(self, message, client=None, labels=None): :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry """ client = self._require_client(client) entry_resource = self._make_entry_resource( - message=message, labels=labels) + message=message, labels=labels, insert_id=insert_id, + severity=severity, http_request=http_request) data = {'entries': [entry_resource]} client.connection.api_request( @@ -283,7 +340,8 @@ def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is None: self.commit() - def log_text(self, text, labels=None): + def log_text(self, text, labels=None, insert_id=None, severity=None, + http_request=None): """Add a text entry to be logged during :meth:`commit`. :type text: string @@ -291,10 +349,22 @@ def log_text(self, text, labels=None): :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. 
""" - self.entries.append(('text', text, labels)) + self.entries.append( + ('text', text, labels, insert_id, severity, http_request)) - def log_struct(self, info, labels=None): + def log_struct(self, info, labels=None, insert_id=None, severity=None, + http_request=None): """Add a struct entry to be logged during :meth:`commit`. :type info: dict @@ -302,10 +372,22 @@ def log_struct(self, info, labels=None): :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. """ - self.entries.append(('struct', info, labels)) + self.entries.append( + ('struct', info, labels, insert_id, severity, http_request)) - def log_proto(self, message, labels=None): + def log_proto(self, message, labels=None, insert_id=None, severity=None, + http_request=None): """Add a protobuf entry to be logged during :meth:`commit`. :type message: protobuf message @@ -313,8 +395,19 @@ def log_proto(self, message, labels=None): :type labels: dict or :class:`NoneType` :param labels: (optional) mapping of labels for the entry. + + :type insert_id: string or :class:`NoneType` + :param insert_id: (optional) unique ID for log entry. + + :type severity: string or :class:`NoneType` + :param severity: (optional) severity of event being logged. + + :type http_request: dict or :class:`NoneType` + :param http_request: (optional) info about HTTP request associated with + the entry. """ - self.entries.append(('proto', message, labels)) + self.entries.append( + ('proto', message, labels, insert_id, severity, http_request)) def commit(self, client=None): """Send saved log entries as a single API call. 
@@ -334,7 +427,7 @@ def commit(self, client=None): data['labels'] = self.logger.labels entries = data['entries'] = [] - for entry_type, entry, labels in self.entries: + for entry_type, entry, labels, iid, severity, http_req in self.entries: if entry_type == 'text': info = {'textPayload': entry} elif entry_type == 'struct': @@ -347,6 +440,12 @@ def commit(self, client=None): raise ValueError('Unknown entry type: %s' % (entry_type,)) if labels is not None: info['labels'] = labels + if iid is not None: + info['insertId'] = iid + if severity is not None: + info['severity'] = severity + if http_req is not None: + info['httpRequest'] = http_req entries.append(info) client.connection.api_request( diff --git a/gcloud/logging/test_entries.py b/gcloud/logging/test_entries.py index cb78d51a2e18..312e456d63e0 100644 --- a/gcloud/logging/test_entries.py +++ b/gcloud/logging/test_entries.py @@ -53,18 +53,18 @@ def test_ctor_explicit(self): METHOD = 'POST' URI = 'https://api.example.com/endpoint' STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } logger = _Logger(self.LOGGER_NAME, self.PROJECT) entry = self._makeOne(PAYLOAD, logger, insert_id=IID, timestamp=TIMESTAMP, labels=LABELS, severity=SEVERITY, - http_request={ - 'requestMethod': METHOD, - 'requestUrl': URI, - 'status': STATUS, - }, - ) + http_request=REQUEST) self.assertEqual(entry.payload, PAYLOAD) self.assertTrue(entry.logger is logger) self.assertEqual(entry.insert_id, IID) diff --git a/gcloud/logging/test_logger.py b/gcloud/logging/test_logger.py index ad698de504f9..069ad2f47d2e 100644 --- a/gcloud/logging/test_logger.py +++ b/gcloud/logging/test_logger.py @@ -123,16 +123,27 @@ def test_log_text_w_default_labels(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) - def test_log_text_w_unicode_explicit_client_and_labels(self): + def test_log_text_w_unicode_explicit_client_labels_severity_httpreq(self): TEXT = u'TEXT' DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } conn = _Connection({}) client1 = _Client(self.PROJECT, object()) client2 = _Client(self.PROJECT, conn) logger = self._makeOne(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - logger.log_text(TEXT, client=client2, labels=LABELS) + logger.log_text(TEXT, client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, http_request=REQUEST) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] SENT = { @@ -144,6 +155,9 @@ def test_log_text_w_unicode_explicit_client_and_labels(self): 'type': 'global', }, 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, }], } self.assertEqual(req['method'], 'POST') @@ -197,16 +211,28 @@ def test_log_struct_w_default_labels(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) - def test_log_struct_w_explicit_client_and_labels(self): + def test_log_struct_w_explicit_client_labels_severity_httpreq(self): STRUCT = {'message': 'MESSAGE', 'weather': 'cloudy'} DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } conn = _Connection({}) 
client1 = _Client(self.PROJECT, object()) client2 = _Client(self.PROJECT, conn) logger = self._makeOne(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - logger.log_struct(STRUCT, client=client2, labels=LABELS) + logger.log_struct(STRUCT, client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, + http_request=REQUEST) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] SENT = { @@ -218,6 +244,9 @@ def test_log_struct_w_explicit_client_and_labels(self): 'type': 'global', }, 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, }], } self.assertEqual(req['method'], 'POST') @@ -277,19 +306,31 @@ def test_log_proto_w_default_labels(self): self.assertEqual(req['path'], '/entries:write') self.assertEqual(req['data'], SENT) - def test_log_proto_w_explicit_client_and_labels(self): + def test_log_proto_w_explicit_client_labels_severity_httpreq(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } conn = _Connection({}) client1 = _Client(self.PROJECT, object()) client2 = _Client(self.PROJECT, conn) logger = self._makeOne(self.LOGGER_NAME, client=client1, labels=DEFAULT_LABELS) - logger.log_proto(message, client=client2, labels=LABELS) + logger.log_proto(message, client=client2, labels=LABELS, + insert_id=IID, severity=SEVERITY, + http_request=REQUEST) self.assertEqual(len(conn._requested), 1) req = conn._requested[0] SENT = { @@ -301,6 +342,9 @@ def test_log_proto_w_explicit_client_and_labels(self): 'type': 'global', }, 'labels': LABELS, + 'insertId': IID, + 'severity': SEVERITY, + 'httpRequest': REQUEST, }], } self.assertEqual(req['method'], 'POST') @@ -402,18 +446,31 @@ def test_log_text_defaults(self): batch = self._makeOne(logger, client=CLIENT) batch.log_text(TEXT) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('text', TEXT, None)]) + self.assertEqual(batch.entries, + [('text', TEXT, None, None, None, None)]) def test_log_text_explicit(self): TEXT = 'This is the entry text' LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } connection = _Connection() CLIENT = _Client(project=self.PROJECT, connection=connection) logger = _Logger() batch = self._makeOne(logger, client=CLIENT) - batch.log_text(TEXT, labels=LABELS) + batch.log_text(TEXT, labels=LABELS, insert_id=IID, severity=SEVERITY, + http_request=REQUEST) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('text', TEXT, LABELS)]) + self.assertEqual(batch.entries, + [('text', TEXT, LABELS, IID, SEVERITY, REQUEST)]) def test_log_struct_defaults(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} @@ -423,18 +480,31 @@ def test_log_struct_defaults(self): batch = self._makeOne(logger, client=CLIENT) batch.log_struct(STRUCT) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('struct', STRUCT, None)]) + self.assertEqual(batch.entries, + [('struct', STRUCT, None, None, None, None)]) def 
test_log_struct_explicit(self): STRUCT = {'message': 'Message text', 'weather': 'partly cloudy'} LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } connection = _Connection() CLIENT = _Client(project=self.PROJECT, connection=connection) logger = _Logger() batch = self._makeOne(logger, client=CLIENT) - batch.log_struct(STRUCT, labels=LABELS) + batch.log_struct(STRUCT, labels=LABELS, insert_id=IID, + severity=SEVERITY, http_request=REQUEST) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('struct', STRUCT, LABELS)]) + self.assertEqual(batch.entries, + [('struct', STRUCT, LABELS, IID, SEVERITY, REQUEST)]) def test_log_proto_defaults(self): from google.protobuf.struct_pb2 import Struct, Value @@ -445,26 +515,39 @@ def test_log_proto_defaults(self): batch = self._makeOne(logger, client=CLIENT) batch.log_proto(message) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('proto', message, None)]) + self.assertEqual(batch.entries, + [('proto', message, None, None, None, None)]) def test_log_proto_explicit(self): from google.protobuf.struct_pb2 import Struct, Value message = Struct(fields={'foo': Value(bool_value=True)}) LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } connection = _Connection() CLIENT = _Client(project=self.PROJECT, connection=connection) logger = _Logger() batch = self._makeOne(logger, client=CLIENT) - batch.log_proto(message, labels=LABELS) + batch.log_proto(message, labels=LABELS, insert_id=IID, + severity=SEVERITY, http_request=REQUEST) self.assertEqual(len(connection._requested), 0) - self.assertEqual(batch.entries, [('proto', message, LABELS)]) + self.assertEqual(batch.entries, + [('proto', message, LABELS, IID, SEVERITY, REQUEST)]) def test_commit_w_invalid_entry_type(self): logger = _Logger() conn = _Connection() CLIENT = _Client(project=self.PROJECT, connection=conn) batch = self._makeOne(logger, CLIENT) - batch.entries.append(('bogus', 'BOGUS', None)) + batch.entries.append(('bogus', 'BOGUS', None, None, None, None)) with self.assertRaises(ValueError): batch.commit() @@ -475,6 +558,9 @@ def test_commit_w_bound_client(self): TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} message = Struct(fields={'foo': Value(bool_value=True)}) + IID1 = 'IID1' + IID2 = 'IID2' + IID3 = 'IID3' conn = _Connection({}) CLIENT = _Client(project=self.PROJECT, connection=conn) logger = _Logger() @@ -484,15 +570,16 @@ def test_commit_w_bound_client(self): 'type': 'global', }, 'entries': [ - {'textPayload': TEXT}, - {'jsonPayload': STRUCT}, - {'protoPayload': json.loads(MessageToJson(message))}, + {'textPayload': TEXT, 'insertId': IID1}, + {'jsonPayload': STRUCT, 'insertId': IID2}, + {'protoPayload': json.loads(MessageToJson(message)), + 'insertId': IID3}, ], } batch = self._makeOne(logger, client=CLIENT) - batch.log_text(TEXT) - batch.log_struct(STRUCT) - batch.log_proto(message) + batch.log_text(TEXT, insert_id=IID1) + batch.log_struct(STRUCT, insert_id=IID2) + batch.log_proto(message, insert_id=IID3) batch.commit() self.assertEqual(list(batch.entries), []) self.assertEqual(len(conn._requested), 1) @@ -511,6 +598,15 @@ 
def test_commit_w_alternate_client(self): message = Struct(fields={'foo': Value(bool_value=True)}) DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } conn1 = _Connection() conn2 = _Connection({}) CLIENT1 = _Client(project=self.PROJECT, connection=conn1) @@ -522,14 +618,15 @@ def test_commit_w_alternate_client(self): 'labels': DEFAULT_LABELS, 'entries': [ {'textPayload': TEXT, 'labels': LABELS}, - {'jsonPayload': STRUCT}, - {'protoPayload': json.loads(MessageToJson(message))}, + {'jsonPayload': STRUCT, 'severity': SEVERITY}, + {'protoPayload': json.loads(MessageToJson(message)), + 'httpRequest': REQUEST}, ], } batch = self._makeOne(logger, client=CLIENT1) batch.log_text(TEXT, labels=LABELS) - batch.log_struct(STRUCT) - batch.log_proto(message) + batch.log_struct(STRUCT, severity=SEVERITY) + batch.log_proto(message, http_request=REQUEST) batch.commit(client=CLIENT2) self.assertEqual(list(batch.entries), []) self.assertEqual(len(conn1._requested), 0) @@ -549,6 +646,15 @@ def test_context_mgr_success(self): message = Struct(fields={'foo': Value(bool_value=True)}) DEFAULT_LABELS = {'foo': 'spam'} LABELS = {'foo': 'bar', 'baz': 'qux'} + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } conn = _Connection({}) CLIENT = _Client(project=self.PROJECT, connection=conn) logger = Logger('logger_name', CLIENT, labels=DEFAULT_LABELS) @@ -559,17 +665,18 @@ def test_context_mgr_success(self): }, 'labels': DEFAULT_LABELS, 'entries': [ - {'textPayload': TEXT}, + {'textPayload': TEXT, 'httpRequest': REQUEST}, {'jsonPayload': STRUCT, 'labels': LABELS}, - {'protoPayload': json.loads(MessageToJson(message))}, + {'protoPayload': json.loads(MessageToJson(message)), + 'severity': SEVERITY}, ], } batch = self._makeOne(logger, client=CLIENT) with batch as other: - other.log_text(TEXT) + other.log_text(TEXT, http_request=REQUEST) other.log_struct(STRUCT, labels=LABELS) - other.log_proto(message) + other.log_proto(message, severity=SEVERITY) self.assertEqual(list(batch.entries), []) self.assertEqual(len(conn._requested), 1) @@ -583,22 +690,32 @@ def test_context_mgr_failure(self): TEXT = 'This is the entry text' STRUCT = {'message': TEXT, 'weather': 'partly cloudy'} LABELS = {'foo': 'bar', 'baz': 'qux'} + IID = 'IID' + SEVERITY = 'CRITICAL' + METHOD = 'POST' + URI = 'https://api.example.com/endpoint' + STATUS = '500' + REQUEST = { + 'requestMethod': METHOD, + 'requestUrl': URI, + 'status': STATUS, + } message = Struct(fields={'foo': Value(bool_value=True)}) conn = _Connection({}) CLIENT = _Client(project=self.PROJECT, connection=conn) logger = _Logger() UNSENT = [ - ('text', TEXT, None), - ('struct', STRUCT, None), - ('proto', message, LABELS), + ('text', TEXT, None, IID, None, None), + ('struct', STRUCT, None, None, SEVERITY, None), + ('proto', message, LABELS, None, None, REQUEST), ] batch = self._makeOne(logger, client=CLIENT) try: with batch as other: - other.log_text(TEXT) - other.log_struct(STRUCT) - other.log_proto(message, labels=LABELS) + other.log_text(TEXT, insert_id=IID) + other.log_struct(STRUCT, severity=SEVERITY) + other.log_proto(message, labels=LABELS, http_request=REQUEST) raise _Bugout() except _Bugout: pass From 660441d973b94d4b3ec4028135e780dc60486802 Mon Sep 17 
From 660441d973b94d4b3ec4028135e780dc60486802 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 30 Mar 2016 12:11:43 -0400
Subject: [PATCH 63/68] Avoid reusing logger name between tests.

Fixes order-based failure between 'log_struct' and 'log_text'.
#1657 still breaks the teardown.
---
 system_tests/logging_.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/system_tests/logging_.py b/system_tests/logging_.py
index fdef82679479..9dc792aa8d60 100644
--- a/system_tests/logging_.py
+++ b/system_tests/logging_.py
@@ -22,7 +22,6 @@
 _MILLIS = 1000 * time.time()
-DEFAULT_LOGGER_NAME = 'system-tests-logger-%d' % (_MILLIS,)
 DEFAULT_METRIC_NAME = 'system-tests-metric-%d' % (_MILLIS,)
 DEFAULT_SINK_NAME = 'system-tests-sink-%d' % (_MILLIS,)
 DEFAULT_FILTER = 'logName:syslog AND severity>=INFO'
@@ -54,9 +53,14 @@ def tearDown(self):
         for doomed in self.to_delete:
             doomed.delete()

+    @staticmethod
+    def _logger_name():
+        _millis = 1000 * time.time()
+        return 'system-tests-logger-%d' % (_millis,)
+
     def test_log_text(self):
         TEXT_PAYLOAD = 'System test: test_log_text'
-        logger = Config.CLIENT.logger(DEFAULT_LOGGER_NAME)
+        logger = Config.CLIENT.logger(self._logger_name())
         self.to_delete.append(logger)
         logger.log_text(TEXT_PAYLOAD)
         time.sleep(2)
@@ -69,7 +73,7 @@ def test_log_struct(self):
             'message': 'System test: test_log_struct',
             'weather': 'partly cloudy',
         }
-        logger = Config.CLIENT.logger(DEFAULT_LOGGER_NAME)
+        logger = Config.CLIENT.logger(self._logger_name())
         self.to_delete.append(logger)
         logger.log_struct(JSON_PAYLOAD)
         time.sleep(2)

From a7749e97f0ecd9e65bd672243b95f608e48e4402 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 30 Mar 2016 12:25:40 -0400
Subject: [PATCH 64/68] Add system test for 'Sink.reload'.

Uses the Bigquery destination because that one runs quickest.
---
 system_tests/logging_.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/system_tests/logging_.py b/system_tests/logging_.py
index fdef82679479..715fb1f201f9 100644
--- a/system_tests/logging_.py
+++ b/system_tests/logging_.py
@@ -150,7 +150,7 @@ def test_create_sink_storage_bucket(self):
         self.to_delete.append(sink)
         self.assertTrue(sink.exists())

-    def test_create_sink_bigquery_dataset(self):
+    def _init_bigquery_dataset(self):
         from gcloud import bigquery
         from gcloud.bigquery.dataset import AccessGrant
         DATASET_URI = 'bigquery.googleapis.com/projects/%s/datasets/%s' % (
@@ -168,10 +168,24 @@ def test_create_sink_bigquery_dataset(self):
             'WRITER', 'groupByEmail', 'cloud-logs@google.com'))
         dataset.access_grants = grants
         dataset.update()
+        return DATASET_URI

-        sink = Config.CLIENT.sink(
-            DEFAULT_SINK_NAME, DEFAULT_FILTER, DATASET_URI)
+    def test_create_sink_bigquery_dataset(self):
+        uri = self._init_bigquery_dataset()
+        sink = Config.CLIENT.sink(DEFAULT_SINK_NAME, DEFAULT_FILTER, uri)
         self.assertFalse(sink.exists())
         sink.create()
         self.to_delete.append(sink)
         self.assertTrue(sink.exists())
+
+    def test_reload_sink(self):
+        uri = self._init_bigquery_dataset()
+        sink = Config.CLIENT.sink(DEFAULT_SINK_NAME, DEFAULT_FILTER, uri)
+        self.assertFalse(sink.exists())
+        sink.create()
+        self.to_delete.append(sink)
+        sink.filter_ = 'BOGUS FILTER'
+        sink.destination = 'BOGUS DESTINATION'
+        sink.reload()
+        self.assertEqual(sink.filter_, DEFAULT_FILTER)
+        self.assertEqual(sink.destination, uri)
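The reload round-trip verified above can be sketched outside the test harness
roughly as follows (a minimal sketch, not part of the patch; the sink name,
filter, and BigQuery dataset URI are placeholders, and the dataset is assumed
to already grant WRITER access to cloud-logs@google.com, as the
``_init_bigquery_dataset`` helper arranges):

    from gcloud import logging

    client = logging.Client()
    dataset_uri = ('bigquery.googleapis.com/projects/'
                   'my-project/datasets/my_dataset')
    sink = client.sink('my-sink', 'logName:syslog AND severity>=INFO',
                       dataset_uri)
    sink.create()                           # API call

    sink.filter_ = 'BOGUS FILTER'           # local-only edits ...
    sink.destination = 'BOGUS DESTINATION'
    sink.reload()                           # ... discarded on reload (API call)
    assert sink.filter_ == 'logName:syslog AND severity>=INFO'
    assert sink.destination == dataset_uri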
From 1800393ad9f85183fc7e859726c3364d6aea695f Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 23 Mar 2016 11:57:59 -0400
Subject: [PATCH 65/68] Add system test for sink creation w/ pubsub topic.

---
 system_tests/logging_.py | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/system_tests/logging_.py b/system_tests/logging_.py
index 38539b84fe90..f5edbd5cb139 100644
--- a/system_tests/logging_.py
+++ b/system_tests/logging_.py
@@ -28,6 +28,7 @@
 DEFAULT_DESCRIPTION = 'System testing'
 BUCKET_NAME = 'gcloud-python-system-testing-%d' % (_MILLIS,)
 DATASET_NAME = 'system_testing_dataset_%d' % (_MILLIS,)
+TOPIC_NAME = 'gcloud-python-system-testing-%d' % (_MILLIS,)


 class Config(object):
@@ -154,6 +155,28 @@ def test_create_sink_storage_bucket(self):
         self.to_delete.append(sink)
         self.assertTrue(sink.exists())

+    def test_create_sink_pubsub_topic(self):
+        from gcloud import pubsub
+
+        # Create the destination topic, and set up the IAM policy to allow
+        # Cloud Logging to write into it.
+        pubsub_client = pubsub.Client()
+        topic = pubsub_client.topic(TOPIC_NAME)
+        topic.create()
+        self.to_delete.append(topic)
+        policy = topic.get_iam_policy()
+        policy.owners.add(policy.group('cloud-logs@google.com'))
+        topic.set_iam_policy(policy)
+
+        TOPIC_URI = 'pubsub.googleapis.com/%s' % (topic.full_name,)
+
+        sink = Config.CLIENT.sink(
+            DEFAULT_SINK_NAME, DEFAULT_FILTER, TOPIC_URI)
+        self.assertFalse(sink.exists())
+        sink.create()
+        self.to_delete.append(sink)
+        self.assertTrue(sink.exists())
+
     def _init_bigquery_dataset(self):
         from gcloud import bigquery
         from gcloud.bigquery.dataset import AccessGrant

From a0c2d89ba41feb2d13cf715cfc166deadcbbd9ec Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 30 Mar 2016 13:36:22 -0400
Subject: [PATCH 66/68] Add system test for 'Sink.update'.

---
 system_tests/logging_.py | 25 ++++++++++++++++++++++---
 1 file changed, 22 insertions(+), 3 deletions(-)

diff --git a/system_tests/logging_.py b/system_tests/logging_.py
index 38539b84fe90..6683f001804c 100644
--- a/system_tests/logging_.py
+++ b/system_tests/logging_.py
@@ -132,7 +132,7 @@ def test_update_metric(self):
         self.assertEqual(after.filter_, NEW_FILTER)
         self.assertEqual(after.description, NEW_DESCRIPTION)

-    def test_create_sink_storage_bucket(self):
+    def _init_storage_bucket(self):
         from gcloud import storage
         BUCKET_URI = 'storage.googleapis.com/%s' % (BUCKET_NAME,)
@@ -147,8 +147,12 @@ def test_create_sink_storage_bucket(self):
         bucket.acl.add_entity(logs_group)
         bucket.acl.save()

-        sink = Config.CLIENT.sink(
-            DEFAULT_SINK_NAME, DEFAULT_FILTER, BUCKET_URI)
+        return BUCKET_URI
+
+    def test_create_sink_storage_bucket(self):
+        uri = self._init_storage_bucket()
+
+        sink = Config.CLIENT.sink(DEFAULT_SINK_NAME, DEFAULT_FILTER, uri)
         self.assertFalse(sink.exists())
         sink.create()
         self.to_delete.append(sink)
@@ -193,3 +197,18 @@ def test_reload_sink(self):
         sink.reload()
         self.assertEqual(sink.filter_, DEFAULT_FILTER)
         self.assertEqual(sink.destination, uri)
+
+    def test_update_sink(self):
+        bucket_uri = self._init_storage_bucket()
+        dataset_uri = self._init_bigquery_dataset()
+        UPDATED_FILTER = 'logName:syslog'
+        sink = Config.CLIENT.sink(
+            DEFAULT_SINK_NAME, DEFAULT_FILTER, bucket_uri)
+        self.assertFalse(sink.exists())
+        sink.create()
+        self.to_delete.append(sink)
+        sink.filter_ = UPDATED_FILTER
+        sink.destination = dataset_uri
+        sink.update()
+        self.assertEqual(sink.filter_, UPDATED_FILTER)
+        self.assertEqual(sink.destination, dataset_uri)
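Where the reload test discards local edits, the update path pushes them to the
server.  A rough standalone equivalent of ``test_update_sink`` (sketch only;
the bucket and dataset URIs are placeholders and are assumed to be
pre-authorized for cloud-logs@google.com, as the ``_init_*`` helpers above
arrange):

    from gcloud import logging

    client = logging.Client()
    bucket_uri = 'storage.googleapis.com/my-logging-bucket'
    dataset_uri = ('bigquery.googleapis.com/projects/'
                   'my-project/datasets/my_dataset')

    sink = client.sink('my-sink', 'logName:syslog AND severity>=INFO',
                       bucket_uri)
    sink.create()                      # API call

    sink.filter_ = 'logName:syslog'    # relax the filter ...
    sink.destination = dataset_uri     # ... and retarget the export
    sink.update()                      # API call: persist both changes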
From 95f6a671c41fcdf212abf72d11b38a5a1c377b45 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 30 Mar 2016 14:58:39 -0400
Subject: [PATCH 67/68] Add system tests for logging w/ insert_id/severity/http_request metadata.

---
 system_tests/logging_.py | 57 ++++++++++++++++++++++++++++++++++++++++
 1 file changed, 57 insertions(+)

diff --git a/system_tests/logging_.py b/system_tests/logging_.py
index 38539b84fe90..b7b50e982620 100644
--- a/system_tests/logging_.py
+++ b/system_tests/logging_.py
@@ -68,6 +68,33 @@ def test_log_text(self):
         self.assertEqual(len(entries), 1)
         self.assertEqual(entries[0].payload, TEXT_PAYLOAD)

+    def test_log_text_w_metadata(self):
+        TEXT_PAYLOAD = 'System test: test_log_text'
+        INSERT_ID = 'INSERTID'
+        SEVERITY = 'INFO'
+        METHOD = 'POST'
+        URI = 'https://api.example.com/endpoint'
+        STATUS = '500'
+        REQUEST = {
+            'requestMethod': METHOD,
+            'requestUrl': URI,
+            'status': STATUS,
+        }
+        logger = Config.CLIENT.logger(self._logger_name())
+        self.to_delete.append(logger)
+        logger.log_text(TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY,
+                        http_request=REQUEST)
+        time.sleep(2)
+        entries, _ = logger.list_entries()
+        self.assertEqual(len(entries), 1)
+        self.assertEqual(entries[0].payload, TEXT_PAYLOAD)
+        self.assertEqual(entries[0].insert_id, INSERT_ID)
+        self.assertEqual(entries[0].severity, SEVERITY)
+        request = entries[0].http_request
+        self.assertEqual(request['requestMethod'], METHOD)
+        self.assertEqual(request['requestUrl'], URI)
+        self.assertEqual(request['status'], int(STATUS))
+
     def test_log_struct(self):
         JSON_PAYLOAD = {
             'message': 'System test: test_log_struct',
@@ -81,6 +108,36 @@ def test_log_struct(self):
         self.assertEqual(len(entries), 1)
         self.assertEqual(entries[0].payload, JSON_PAYLOAD)

+    def test_log_struct_w_metadata(self):
+        JSON_PAYLOAD = {
+            'message': 'System test: test_log_struct',
+            'weather': 'partly cloudy',
+        }
+        INSERT_ID = 'INSERTID'
+        SEVERITY = 'INFO'
+        METHOD = 'POST'
+        URI = 'https://api.example.com/endpoint'
+        STATUS = '500'
+        REQUEST = {
+            'requestMethod': METHOD,
+            'requestUrl': URI,
+            'status': STATUS,
+        }
+        logger = Config.CLIENT.logger(self._logger_name())
+        self.to_delete.append(logger)
+        logger.log_struct(JSON_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY,
+                          http_request=REQUEST)
+        time.sleep(2)
+        entries, _ = logger.list_entries()
+        self.assertEqual(len(entries), 1)
+        self.assertEqual(entries[0].payload, JSON_PAYLOAD)
+        self.assertEqual(entries[0].insert_id, INSERT_ID)
+        self.assertEqual(entries[0].severity, SEVERITY)
+        request = entries[0].http_request
+        self.assertEqual(request['requestMethod'], METHOD)
+        self.assertEqual(request['requestUrl'], URI)
+        self.assertEqual(request['status'], int(STATUS))
+
     def test_create_metric(self):
         metric = Config.CLIENT.metric(
             DEFAULT_METRIC_NAME, DEFAULT_FILTER, DEFAULT_DESCRIPTION)

From 9bd2880440d588d559976150981ee79e743638bc Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Wed, 30 Mar 2016 15:07:32 -0400
Subject: [PATCH 68/68] Add exponential backoff for deletion failures.

Fixes: #1657.

Note for potential broader use toward #1619.
---
 system_tests/logging_.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/system_tests/logging_.py b/system_tests/logging_.py
index 38539b84fe90..ff4115cb81a0 100644
--- a/system_tests/logging_.py
+++ b/system_tests/logging_.py
@@ -50,8 +50,18 @@ def setUp(self):
         self.to_delete = []

     def tearDown(self):
+        from gcloud.exceptions import NotFound
         for doomed in self.to_delete:
-            doomed.delete()
+            backoff_intervals = [1, 2, 4, 8]
+            while True:
+                try:
+                    doomed.delete()
+                    break
+                except NotFound:
+                    if backoff_intervals:
+                        time.sleep(backoff_intervals.pop(0))
+                    else:
+                        raise

     @staticmethod
     def _logger_name():
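The backoff added to ``tearDown`` above could be factored into a reusable
helper if other flaky cleanups need it later (#1619).  A minimal sketch, using
a hypothetical ``_retry_on_not_found`` name that does not exist in the patch:

    import time

    from gcloud.exceptions import NotFound


    def _retry_on_not_found(callable_, intervals=(1, 2, 4, 8)):
        """Invoke ``callable_``, sleeping and retrying while it raises NotFound."""
        remaining = list(intervals)
        while True:
            try:
                return callable_()
            except NotFound:
                if not remaining:
                    raise
                time.sleep(remaining.pop(0))


    # Usage inside a tearDown, mirroring the loop in the patch:
    #     for doomed in self.to_delete:
    #         _retry_on_not_found(doomed.delete)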