From f784121f2ffa06255d7469f68f6c49051d9313d2 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 25 May 2017 07:03:40 -0700 Subject: [PATCH 01/86] [WIP] Getting started on Pub/Sub. --- pubsub/google/cloud/gapic/__init__.py | 1 + pubsub/google/cloud/gapic/pubsub/__init__.py | 1 + .../cloud/gapic/pubsub/v1}/__init__.py | 0 .../cloud/gapic/pubsub/v1/publisher_client.py | 565 ++++ .../pubsub/v1/publisher_client_config.json | 98 + .../gapic/pubsub/v1/subscriber_client.py | 1065 +++++++ .../pubsub/v1/subscriber_client_config.json | 129 + pubsub/google/cloud/proto/__init__.py | 1 + pubsub/google/cloud/proto/pubsub/__init__.py | 1 + .../google/cloud/proto/pubsub/v1/__init__.py | 1 + .../cloud/proto/pubsub/v1/pubsub_pb2.py | 2671 +++++++++++++++++ .../cloud/proto/pubsub/v1/pubsub_pb2_grpc.py | 461 +++ pubsub/google/cloud/pubsub.py | 24 + pubsub/google/cloud/pubsub/__init__.py | 34 - pubsub/google/cloud/pubsub/_gax.py | 796 ----- pubsub/google/cloud/pubsub/_helpers.py | 73 - pubsub/google/cloud/pubsub/_http.py | 782 ----- pubsub/google/cloud/pubsub/client.py | 283 -- pubsub/google/cloud/pubsub/iam.py | 138 - pubsub/google/cloud/pubsub/message.py | 91 - pubsub/google/cloud/pubsub/snapshot.py | 140 - pubsub/google/cloud/pubsub/subscription.py | 590 ---- pubsub/google/cloud/pubsub/topic.py | 551 ---- pubsub/google/cloud/pubsub_v1/__init__.py | 23 + pubsub/google/cloud/pubsub_v1/_gapic.py | 53 + .../cloud/pubsub_v1/publisher}/__init__.py | 9 +- .../google/cloud/pubsub_v1/publisher/batch.py | 144 + .../cloud/pubsub_v1/publisher/client.py | 108 + pubsub/google/cloud/pubsub_v1/subscriber.py | 34 + pubsub/google/cloud/pubsub_v1/types.py | 60 + pubsub/setup.py | 5 +- pubsub/tests/system.py | 393 --- pubsub/tests/unit/test__gax.py | 1598 ---------- pubsub/tests/unit/test__helpers.py | 59 - pubsub/tests/unit/test__http.py | 1162 ------- pubsub/tests/unit/test_client.py | 462 --- pubsub/tests/unit/test_iam.py | 81 - pubsub/tests/unit/test_message.py | 125 - 
pubsub/tests/unit/test_snpashot.py | 215 -- pubsub/tests/unit/test_subscription.py | 957 ------ pubsub/tests/unit/test_topic.py | 974 ------ 41 files changed, 5450 insertions(+), 9508 deletions(-) create mode 100644 pubsub/google/cloud/gapic/__init__.py create mode 100644 pubsub/google/cloud/gapic/pubsub/__init__.py rename pubsub/{tests => google/cloud/gapic/pubsub/v1}/__init__.py (100%) create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json create mode 100644 pubsub/google/cloud/proto/__init__.py create mode 100644 pubsub/google/cloud/proto/pubsub/__init__.py create mode 100644 pubsub/google/cloud/proto/pubsub/v1/__init__.py create mode 100644 pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py create mode 100644 pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py create mode 100644 pubsub/google/cloud/pubsub.py delete mode 100644 pubsub/google/cloud/pubsub/__init__.py delete mode 100644 pubsub/google/cloud/pubsub/_gax.py delete mode 100644 pubsub/google/cloud/pubsub/_helpers.py delete mode 100644 pubsub/google/cloud/pubsub/_http.py delete mode 100644 pubsub/google/cloud/pubsub/client.py delete mode 100644 pubsub/google/cloud/pubsub/iam.py delete mode 100644 pubsub/google/cloud/pubsub/message.py delete mode 100644 pubsub/google/cloud/pubsub/snapshot.py delete mode 100644 pubsub/google/cloud/pubsub/subscription.py delete mode 100644 pubsub/google/cloud/pubsub/topic.py create mode 100644 pubsub/google/cloud/pubsub_v1/__init__.py create mode 100644 pubsub/google/cloud/pubsub_v1/_gapic.py rename pubsub/{tests/unit => google/cloud/pubsub_v1/publisher}/__init__.py (77%) create mode 100644 pubsub/google/cloud/pubsub_v1/publisher/batch.py create mode 100644 pubsub/google/cloud/pubsub_v1/publisher/client.py 
create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber.py create mode 100644 pubsub/google/cloud/pubsub_v1/types.py delete mode 100644 pubsub/tests/system.py delete mode 100644 pubsub/tests/unit/test__gax.py delete mode 100644 pubsub/tests/unit/test__helpers.py delete mode 100644 pubsub/tests/unit/test__http.py delete mode 100644 pubsub/tests/unit/test_client.py delete mode 100644 pubsub/tests/unit/test_iam.py delete mode 100644 pubsub/tests/unit/test_message.py delete mode 100644 pubsub/tests/unit/test_snpashot.py delete mode 100644 pubsub/tests/unit/test_subscription.py delete mode 100644 pubsub/tests/unit/test_topic.py diff --git a/pubsub/google/cloud/gapic/__init__.py b/pubsub/google/cloud/gapic/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/gapic/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/gapic/pubsub/__init__.py b/pubsub/google/cloud/gapic/pubsub/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/tests/__init__.py b/pubsub/google/cloud/gapic/pubsub/v1/__init__.py similarity index 100% rename from pubsub/tests/__init__.py rename to pubsub/google/cloud/gapic/pubsub/v1/__init__.py diff --git a/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py new file mode 100644 index 000000000000..c0466e6d444b --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py @@ -0,0 +1,565 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Publisher API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 + +_PageDesc = google.gax.PageDescriptor + + +class PublisherClient(object): + """ + The service that an application uses to manipulate topics, and to send + messages to a topic. 
+ """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + _PAGE_DESCRIPTORS = { + 'list_topics': + _PageDesc('page_token', 'next_page_token', 'topics'), + 'list_topic_subscriptions': + _PageDesc('page_token', 'next_page_token', 'subscriptions') + } + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') + _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/topics/{topic}') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return cls._PROJECT_PATH_TEMPLATE.render({ + 'project': project, + }) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return cls._TOPIC_PATH_TEMPLATE.render({ + 'project': project, + 'topic': topic, + }) + + @classmethod + def match_project_from_project_name(cls, project_name): + """Parses the project from a project resource. + + Args: + project_name (string): A fully-qualified path representing a project + resource. + + Returns: + A string representing the project. + """ + return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') + + @classmethod + def match_project_from_topic_name(cls, topic_name): + """Parses the project from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the project. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') + + @classmethod + def match_topic_from_topic_name(cls, topic_name): + """Parses the topic from a topic resource. 
+ + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the topic. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 
'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A PublisherClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + # Load the configuration defaults. + default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'publisher_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.pubsub.v1.Publisher', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.iam_policy_stub = config.create_stub( + iam_policy_pb2.IAMPolicyStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + self.publisher_stub = config.create_stub( + pubsub_pb2.PublisherStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._create_topic = api_callable.create_api_call( + self.publisher_stub.CreateTopic, settings=defaults['create_topic']) + self._publish = api_callable.create_api_call( + 
self.publisher_stub.Publish, settings=defaults['publish']) + self._get_topic = api_callable.create_api_call( + self.publisher_stub.GetTopic, settings=defaults['get_topic']) + self._list_topics = api_callable.create_api_call( + self.publisher_stub.ListTopics, settings=defaults['list_topics']) + self._list_topic_subscriptions = api_callable.create_api_call( + self.publisher_stub.ListTopicSubscriptions, + settings=defaults['list_topic_subscriptions']) + self._delete_topic = api_callable.create_api_call( + self.publisher_stub.DeleteTopic, settings=defaults['delete_topic']) + self._set_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.SetIamPolicy, + settings=defaults['set_iam_policy']) + self._get_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.GetIamPolicy, + settings=defaults['get_iam_policy']) + self._test_iam_permissions = api_callable.create_api_call( + self.iam_policy_stub.TestIamPermissions, + settings=defaults['test_iam_permissions']) + + # Service calls + def create_topic(self, name, options=None): + """ + Creates the given topic with the given name. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> name = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.create_topic(name) + + Args: + name (string): The name of the topic. It must have the format + ``\"projects/{project}/topics/{topic}\"``. ``{topic}`` must start with a letter, + and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent + signs (``%``). It must be between 3 and 255 characters in length, and it + must not start with ``\"goog\"``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. 
+ + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.Topic(name=name) + return self._create_topic(request, options) + + def publish(self, topic, messages, options=None): + """ + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> data = b'' + >>> messages_element = pubsub_pb2.PubsubMessage(data=data) + >>> messages = [messages_element] + >>> response = client.publish(topic, messages) + + Args: + topic (string): The messages in the request will be published on this topic. + Format is ``projects/{project}/topics/{topic}``. + messages (list[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PubsubMessage`]): The messages to publish. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PublishResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) + return self._publish(request, options) + + def get_topic(self, topic, options=None): + """ + Gets the configuration of a topic. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.get_topic(topic) + + Args: + topic (string): The name of the topic to get. + Format is ``projects/{project}/topics/{topic}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.GetTopicRequest(topic=topic) + return self._get_topic(request, options) + + def list_topics(self, project, page_size=None, options=None): + """ + Lists matching topics. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = publisher_client.PublisherClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topics(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that topics belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. 
+ + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListTopicsRequest( + project=project, page_size=page_size) + return self._list_topics(request, options) + + def list_topic_subscriptions(self, topic, page_size=None, options=None): + """ + Lists the name of the subscriptions for this topic. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topic_subscriptions(topic): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + topic (string): The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of string instances. 
+ This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListTopicSubscriptionsRequest( + topic=topic, page_size=page_size) + return self._list_topic_subscriptions(request, options) + + def delete_topic(self, topic, options=None): + """ + Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their ``topic`` field is set to ``_deleted-topic_``. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> client.delete_topic(topic) + + Args: + topic (string): Name of the topic to delete. + Format is ``projects/{project}/topics/{topic}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteTopicRequest(topic=topic) + self._delete_topic(request, options) + + def set_iam_policy(self, resource, policy, options=None): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.iam.v1 import policy_pb2 + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> policy = policy_pb2.Policy() + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (string): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, options) + + def get_iam_policy(self, resource, options=None): + """ + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.get_iam_policy(resource) + + Args: + resource (string): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. 
+ options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, options) + + def test_iam_permissions(self, resource, permissions, options=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> permissions = [] + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (string): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+ request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions(request, options) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json new file mode 100644 index 000000000000..7e8a723499e6 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json @@ -0,0 +1,98 @@ +{ + "interfaces": { + "google.pubsub.v1.Publisher": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "one_plus_delivery": [ + "CANCELLED", + "UNKNOWN", + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "ABORTED", + "INTERNAL", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Publish": { + "timeout_millis": 60000, + "retry_codes_name": "one_plus_delivery", + "retry_params_name": "messaging", + "bundling": { + "element_count_threshold": 10, + "element_count_limit": 1000, + "request_byte_threshold": 1024, + "request_byte_limit": 10485760, + "delay_threshold_millis": 10 + } + }, + "GetTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListTopics": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + 
"retry_params_name": "default" + }, + "ListTopicSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py new file mode 100644 index 000000000000..ab8233824595 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py @@ -0,0 +1,1065 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. 
A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Subscriber API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +from google.gax.utils import oneof +import google.gax + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 + +_PageDesc = google.gax.PageDescriptor + + +class SubscriberClient(object): + """ + The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the ``Pull`` method. + """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + _PAGE_DESCRIPTORS = { + 'list_subscriptions': + _PageDesc('page_token', 'next_page_token', 'subscriptions'), + 'list_snapshots': + _PageDesc('page_token', 'next_page_token', 'snapshots') + } + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') + _SNAPSHOT_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/snapshots/{snapshot}') + _SUBSCRIPTION_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/subscriptions/{subscription}') + _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/topics/{topic}') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return cls._PROJECT_PATH_TEMPLATE.render({ + 'project': project, + 
}) + + @classmethod + def snapshot_path(cls, project, snapshot): + """Returns a fully-qualified snapshot resource name string.""" + return cls._SNAPSHOT_PATH_TEMPLATE.render({ + 'project': project, + 'snapshot': snapshot, + }) + + @classmethod + def subscription_path(cls, project, subscription): + """Returns a fully-qualified subscription resource name string.""" + return cls._SUBSCRIPTION_PATH_TEMPLATE.render({ + 'project': + project, + 'subscription': + subscription, + }) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return cls._TOPIC_PATH_TEMPLATE.render({ + 'project': project, + 'topic': topic, + }) + + @classmethod + def match_project_from_project_name(cls, project_name): + """Parses the project from a project resource. + + Args: + project_name (string): A fully-qualified path representing a project + resource. + + Returns: + A string representing the project. + """ + return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') + + @classmethod + def match_project_from_snapshot_name(cls, snapshot_name): + """Parses the project from a snapshot resource. + + Args: + snapshot_name (string): A fully-qualified path representing a snapshot + resource. + + Returns: + A string representing the project. + """ + return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('project') + + @classmethod + def match_snapshot_from_snapshot_name(cls, snapshot_name): + """Parses the snapshot from a snapshot resource. + + Args: + snapshot_name (string): A fully-qualified path representing a snapshot + resource. + + Returns: + A string representing the snapshot. + """ + return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('snapshot') + + @classmethod + def match_project_from_subscription_name(cls, subscription_name): + """Parses the project from a subscription resource. + + Args: + subscription_name (string): A fully-qualified path representing a subscription + resource. 
+ + Returns: + A string representing the project. + """ + return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( + 'project') + + @classmethod + def match_subscription_from_subscription_name(cls, subscription_name): + """Parses the subscription from a subscription resource. + + Args: + subscription_name (string): A fully-qualified path representing a subscription + resource. + + Returns: + A string representing the subscription. + """ + return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( + 'subscription') + + @classmethod + def match_project_from_topic_name(cls, topic_name): + """Parses the project from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the project. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') + + @classmethod + def match_topic_from_topic_name(cls, topic_name): + """Parses the topic from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the topic. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. 
+ scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A SubscriberClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + # Load the configuration defaults. 
+ default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'subscriber_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.pubsub.v1.Subscriber', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.iam_policy_stub = config.create_stub( + iam_policy_pb2.IAMPolicyStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + self.subscriber_stub = config.create_stub( + pubsub_pb2.SubscriberStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._create_subscription = api_callable.create_api_call( + self.subscriber_stub.CreateSubscription, + settings=defaults['create_subscription']) + self._get_subscription = api_callable.create_api_call( + self.subscriber_stub.GetSubscription, + settings=defaults['get_subscription']) + self._update_subscription = api_callable.create_api_call( + self.subscriber_stub.UpdateSubscription, + settings=defaults['update_subscription']) + self._list_subscriptions = api_callable.create_api_call( + self.subscriber_stub.ListSubscriptions, + settings=defaults['list_subscriptions']) + self._delete_subscription = api_callable.create_api_call( + self.subscriber_stub.DeleteSubscription, + settings=defaults['delete_subscription']) + self._modify_ack_deadline = api_callable.create_api_call( + self.subscriber_stub.ModifyAckDeadline, + settings=defaults['modify_ack_deadline']) + self._acknowledge = api_callable.create_api_call( + self.subscriber_stub.Acknowledge, settings=defaults['acknowledge']) + self._pull = api_callable.create_api_call( + self.subscriber_stub.Pull, settings=defaults['pull']) + self._streaming_pull = api_callable.create_api_call( + self.subscriber_stub.StreamingPull, + 
settings=defaults['streaming_pull']) + self._modify_push_config = api_callable.create_api_call( + self.subscriber_stub.ModifyPushConfig, + settings=defaults['modify_push_config']) + self._list_snapshots = api_callable.create_api_call( + self.subscriber_stub.ListSnapshots, + settings=defaults['list_snapshots']) + self._create_snapshot = api_callable.create_api_call( + self.subscriber_stub.CreateSnapshot, + settings=defaults['create_snapshot']) + self._delete_snapshot = api_callable.create_api_call( + self.subscriber_stub.DeleteSnapshot, + settings=defaults['delete_snapshot']) + self._seek = api_callable.create_api_call( + self.subscriber_stub.Seek, settings=defaults['seek']) + self._set_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.SetIamPolicy, + settings=defaults['set_iam_policy']) + self._get_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.GetIamPolicy, + settings=defaults['get_iam_policy']) + self._test_iam_permissions = api_callable.create_api_call( + self.iam_policy_stub.TestIamPermissions, + settings=defaults['test_iam_permissions']) + + # Service calls + def create_subscription(self, + name, + topic, + push_config=None, + ack_deadline_seconds=None, + retain_acked_messages=None, + message_retention_duration=None, + options=None): + """ + Creates a subscription to a given topic. + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + `resource name format <https://cloud.google.com/pubsub/docs/overview#names>`_. + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request.
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.create_subscription(name, topic) + + Args: + name (string): The name of the subscription. It must have the format + ``\"projects/{project}/subscriptions/{subscription}\"``. ``{subscription}`` must + start with a letter, and contain only letters (``[A-Za-z]``), numbers + (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters + in length, and it must not start with ``\"goog\"``. + topic (string): The name of the topic from which this subscription is receiving messages. + Format is ``projects/{project}/topics/{topic}``. + The value of this field will be ``_deleted-topic_`` if the topic has been + deleted. + push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): If push delivery is used with this subscription, this field is + used to configure it. An empty ``pushConfig`` signifies that the subscriber + will pull and ack messages using API methods. + ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message + before the subscriber should acknowledge the message. After message + delivery but before the ack deadline expires and before the message is + acknowledged, it is an outstanding message and will not be delivered + again during that time (on a best-effort basis). + + For pull subscriptions, this value is used as the initial value for the ack + deadline. To override this value for a given message, call + ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using + pull. + The minimum custom deadline you can specify is 10 seconds. + The maximum custom deadline you can specify is 600 seconds (10 minutes). 
+ If this parameter is 0, a default value of 10 seconds is used. + + For push delivery, this value is also used to set the request timeout for + the call to the push endpoint. + + If the subscriber never acknowledges the message, the Pub/Sub + system will eventually redeliver the message. + retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then + messages are not expunged from the subscription's backlog, even if they are + acknowledged, until they fall out of the ``message_retention_duration`` + window. + message_retention_duration (:class:`google.protobuf.duration_pb2.Duration`): How long to retain unacknowledged messages in the subscription's backlog, + from the moment a message is published. + If ``retain_acked_messages`` is true, then this also configures the retention + of acknowledged messages, and thus configures how far back in time a ``Seek`` + can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.Subscription( + name=name, + topic=topic, + push_config=push_config, + ack_deadline_seconds=ack_deadline_seconds, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration) + return self._create_subscription(request, options) + + def get_subscription(self, subscription, options=None): + """ + Gets the configuration details of a subscription. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.get_subscription(subscription) + + Args: + subscription (string): The name of the subscription to get. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) + return self._get_subscription(request, options) + + def update_subscription(self, subscription, update_mask, options=None): + """ + Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> from google.protobuf import field_mask_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = pubsub_pb2.Subscription() + >>> update_mask = field_mask_pb2.FieldMask() + >>> response = client.update_subscription(subscription, update_mask) + + Args: + subscription (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription`): The updated subscription object. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Indicates which fields in the provided subscription to update. + Must be specified and non-empty. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. 
+ + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.UpdateSubscriptionRequest( + subscription=subscription, update_mask=update_mask) + return self._update_subscription(request, options) + + def list_subscriptions(self, project, page_size=None, options=None): + """ + Lists matching subscriptions. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = subscriber_client.SubscriberClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_subscriptions(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+ request = pubsub_pb2.ListSubscriptionsRequest( + project=project, page_size=page_size) + return self._list_subscriptions(request, options) + + def delete_subscription(self, subscription, options=None): + """ + Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to ``Pull`` after deletion will return + ``NOT_FOUND``. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> client.delete_subscription(subscription) + + Args: + subscription (string): The subscription to delete. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteSubscriptionRequest( + subscription=subscription) + self._delete_subscription(request, options) + + def modify_ack_deadline(self, + subscription, + ack_ids, + ack_deadline_seconds, + options=None): + """ + Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level ``ackDeadlineSeconds`` used for subsequent messages. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> ack_deadline_seconds = 0 + >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) + + Args: + subscription (string): The name of the subscription. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[string]): List of acknowledgment IDs. + ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to + the Pub/Sub system. For example, if the value is 10, the new + ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero may immediately make the message available for + another pull request. + The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 minutes). + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ModifyAckDeadlineRequest( + subscription=subscription, + ack_ids=ack_ids, + ack_deadline_seconds=ack_deadline_seconds) + self._modify_ack_deadline(request, options) + + def acknowledge(self, subscription, ack_ids, options=None): + """ + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> client.acknowledge(subscription, ack_ids) + + Args: + subscription (string): The subscription whose message is being acknowledged. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[string]): The acknowledgment ID for the messages being acknowledged that was returned + by the Pub/Sub system in the ``Pull`` response. Must not be empty. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.AcknowledgeRequest( + subscription=subscription, ack_ids=ack_ids) + self._acknowledge(request, options) + + def pull(self, + subscription, + max_messages, + return_immediately=None, + options=None): + """ + Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return ``UNAVAILABLE`` if + there are too many concurrent pull requests pending for the given + subscription. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> max_messages = 0 + >>> response = client.pull(subscription, max_messages) + + Args: + subscription (string): The subscription from which messages should be pulled. + Format is ``projects/{project}/subscriptions/{sub}``. + max_messages (int): The maximum number of messages returned for this request. The Pub/Sub + system may return fewer than the number specified. 
+ return_immediately (bool): If this field is set to true, the system will respond immediately even if + there are no messages available to return in the ``Pull`` response. + Otherwise, the system may wait (for a bounded amount of time) until at + least one message is available, rather than returning no messages. The + client may cancel the request if it does not wish to wait any longer for + the response. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PullResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.PullRequest( + subscription=subscription, + max_messages=max_messages, + return_immediately=return_immediately) + return self._pull(request, options) + + def streaming_pull(self, requests, options=None): + """ + (EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status ``OK`` to reassign + server-side resources, in which case, the client should re-establish the + stream. ``UNAVAILABLE`` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + + EXPERIMENTAL: This method interface might change in the future.
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> stream_ack_deadline_seconds = 0 + >>> request = pubsub_pb2.StreamingPullRequest(subscription=subscription, stream_ack_deadline_seconds=stream_ack_deadline_seconds) + >>> requests = [request] + >>> for element in client.streaming_pull(requests): + >>> # process element + >>> pass + + Args: + requests (iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullRequest`]): The input objects. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullResponse`]. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + return self._streaming_pull(requests, options) + + def modify_push_config(self, subscription, push_config, options=None): + """ + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty ``PushConfig``) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the ``PushConfig``. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> push_config = pubsub_pb2.PushConfig() + >>> client.modify_push_config(subscription, push_config) + + Args: + subscription (string): The name of the subscription. 
+ Format is ``projects/{project}/subscriptions/{sub}``. + push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub system should + stop pushing messages from the given subscription and allow + messages to be pulled and acknowledged - effectively pausing + the subscription if ``Pull`` is not called. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ModifyPushConfigRequest( + subscription=subscription, push_config=push_config) + self._modify_push_config(request, options) + + def list_snapshots(self, project, page_size=None, options=None): + """ + Lists the existing snapshots. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = subscriber_client.SubscriberClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_snapshots(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_snapshots(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that snapshots belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. 
+ options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListSnapshotsRequest( + project=project, page_size=page_size) + return self._list_snapshots(request, options) + + def create_snapshot(self, name, subscription, options=None): + """ + Creates a snapshot from the requested subscription. + If the snapshot already exists, returns ``ALREADY_EXISTS``. + If the requested subscription doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + `resource name format <https://cloud.google.com/pubsub/docs/overview#names>`_. + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.create_snapshot(name, subscription) + + Args: + name (string): Optional user-provided name for this snapshot. + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription. + Note that for REST API requests, you must specify a name. + Format is ``projects/{project}/snapshots/{snap}``.
+ subscription (string): The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: + (a) The existing backlog on the subscription. More precisely, this is + :: + + defined as the messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + `CreateSnapshot` request; as well as: + (b) Any messages published to the subscription's topic following the + :: + + successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.CreateSnapshotRequest( + name=name, subscription=subscription) + return self._create_snapshot(request, options) + + def delete_snapshot(self, snapshot, options=None): + """ + Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> client.delete_snapshot(snapshot) + + Args: + snapshot (string): The name of the snapshot to delete. + Format is ``projects/{project}/snapshots/{snap}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. 
+ :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + self._delete_snapshot(request, options) + + def seek(self, subscription, time=None, snapshot=None, options=None): + """ + Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.seek(subscription) + + Args: + subscription (string): The subscription to affect. + time (:class:`google.protobuf.timestamp_pb2.Timestamp`): The time to seek to. + Messages retained in the subscription that were published before this + time are marked as acknowledged, and messages retained in the + subscription that were published after this time are marked as + unacknowledged. Note that this operation affects only those messages + retained in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). For example, + if ``time`` corresponds to a point before the message retention + window (or to a point before the system's notion of the subscription + creation time), only retained messages will be marked as unacknowledged, + and already-expunged messages will not be restored. + snapshot (string): The snapshot to seek to. The snapshot's topic must be the same as that of + the provided subscription. + Format is ``projects/{project}/snapshots/{snap}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.SeekResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
+ """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + oneof.check_oneof( + time=time, + snapshot=snapshot, ) + + # Create the request object. + request = pubsub_pb2.SeekRequest( + subscription=subscription, time=time, snapshot=snapshot) + return self._seek(request, options) + + def set_iam_policy(self, resource, policy, options=None): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.iam.v1 import policy_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> policy = policy_pb2.Policy() + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (string): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, options) + + def get_iam_policy(self, resource, options=None): + """ + Gets the access control policy for a resource. 
+ Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.get_iam_policy(resource) + + Args: + resource (string): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, options) + + def test_iam_permissions(self, resource, permissions, options=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> permissions = [] + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (string): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. 
+ options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions(request, options) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json new file mode 100644 index 000000000000..4b31158fbac8 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json @@ -0,0 +1,129 @@ +{ + "interfaces": { + "google.pubsub.v1.Subscriber": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ], + "pull": [ + "CANCELLED", + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "INTERNAL", + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "UpdateSubscription": { + "timeout_millis": 60000, + 
"retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ModifyAckDeadline": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Acknowledge": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "messaging" + }, + "Pull": { + "timeout_millis": 60000, + "retry_codes_name": "pull", + "retry_params_name": "messaging" + }, + "StreamingPull": { + "timeout_millis": 60000, + "retry_codes_name": "pull", + "retry_params_name": "messaging" + }, + "ModifyPushConfig": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListSnapshots": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Seek": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/pubsub/google/cloud/proto/__init__.py b/pubsub/google/cloud/proto/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ 
b/pubsub/google/cloud/proto/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/proto/pubsub/__init__.py b/pubsub/google/cloud/proto/pubsub/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/proto/pubsub/v1/__init__.py b/pubsub/google/cloud/proto/pubsub/v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py new file mode 100644 index 000000000000..07919f8c5646 --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py @@ -0,0 +1,2671 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/proto/pubsub/v1/pubsub.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/pubsub/v1/pubsub.proto', + 
package='google.pubsub.v1', + syntax='proto3', + serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x15\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xda\x01\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 
\x01(\x0b\x32\x19.google.protobuf.Duration\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 
\x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"X\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 
\x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xe8\x0f\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rLis
tSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9b\x06\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') + , + 
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_TOPIC = _descriptor.Descriptor( + name='Topic', + full_name='google.pubsub.v1.Topic', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Topic.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=221, + serialized_end=242, +) + + +_PUBSUBMESSAGE_ATTRIBUTESENTRY = _descriptor.Descriptor( + name='AttributesEntry', + full_name='google.pubsub.v1.PubsubMessage.AttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=415, + serialized_end=464, +) + +_PUBSUBMESSAGE = _descriptor.Descriptor( + name='PubsubMessage', + full_name='google.pubsub.v1.PubsubMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='google.pubsub.v1.PubsubMessage.data', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='attributes', full_name='google.pubsub.v1.PubsubMessage.attributes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_id', full_name='google.pubsub.v1.PubsubMessage.message_id', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='publish_time', full_name='google.pubsub.v1.PubsubMessage.publish_time', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=245, + serialized_end=464, +) + + +_GETTOPICREQUEST = _descriptor.Descriptor( + 
name='GetTopicRequest', + full_name='google.pubsub.v1.GetTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.GetTopicRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=466, + serialized_end=498, +) + + +_PUBLISHREQUEST = _descriptor.Descriptor( + name='PublishRequest', + full_name='google.pubsub.v1.PublishRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.PublishRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='messages', full_name='google.pubsub.v1.PublishRequest.messages', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=500, + serialized_end=582, +) + + +_PUBLISHRESPONSE = _descriptor.Descriptor( + name='PublishResponse', + full_name='google.pubsub.v1.PublishResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message_ids', 
full_name='google.pubsub.v1.PublishResponse.message_ids', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=584, + serialized_end=622, +) + + +_LISTTOPICSREQUEST = _descriptor.Descriptor( + name='ListTopicsRequest', + full_name='google.pubsub.v1.ListTopicsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListTopicsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListTopicsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=624, + serialized_end=699, +) + + +_LISTTOPICSRESPONSE = _descriptor.Descriptor( + name='ListTopicsResponse', + 
full_name='google.pubsub.v1.ListTopicsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topics', full_name='google.pubsub.v1.ListTopicsResponse.topics', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=701, + serialized_end=787, +) + + +_LISTTOPICSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( + name='ListTopicSubscriptionsRequest', + full_name='google.pubsub.v1.ListTopicSubscriptionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', 
full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=789, + serialized_end=874, +) + + +_LISTTOPICSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( + name='ListTopicSubscriptionsResponse', + full_name='google.pubsub.v1.ListTopicSubscriptionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscriptions', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=876, + serialized_end=956, +) + + +_DELETETOPICREQUEST = _descriptor.Descriptor( + name='DeleteTopicRequest', + full_name='google.pubsub.v1.DeleteTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.DeleteTopicRequest.topic', index=0, + 
number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=958, + serialized_end=993, +) + + +_SUBSCRIPTION = _descriptor.Descriptor( + name='Subscription', + full_name='google.pubsub.v1.Subscription', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Subscription.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.Subscription.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='push_config', full_name='google.pubsub.v1.Subscription.push_config', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_deadline_seconds', full_name='google.pubsub.v1.Subscription.ack_deadline_seconds', index=3, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='retain_acked_messages', full_name='google.pubsub.v1.Subscription.retain_acked_messages', index=4, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_retention_duration', full_name='google.pubsub.v1.Subscription.message_retention_duration', index=5, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=996, + serialized_end=1214, +) + + +_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( + name='AttributesEntry', + full_name='google.pubsub.v1.PushConfig.AttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.PushConfig.AttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.PushConfig.AttributesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + 
oneofs=[ + ], + serialized_start=415, + serialized_end=464, +) + +_PUSHCONFIG = _descriptor.Descriptor( + name='PushConfig', + full_name='google.pubsub.v1.PushConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='push_endpoint', full_name='google.pubsub.v1.PushConfig.push_endpoint', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='attributes', full_name='google.pubsub.v1.PushConfig.attributes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1217, + serialized_end=1369, +) + + +_RECEIVEDMESSAGE = _descriptor.Descriptor( + name='ReceivedMessage', + full_name='google.pubsub.v1.ReceivedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ack_id', full_name='google.pubsub.v1.ReceivedMessage.ack_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message', full_name='google.pubsub.v1.ReceivedMessage.message', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1371, + serialized_end=1454, +) + + +_GETSUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='GetSubscriptionRequest', + full_name='google.pubsub.v1.GetSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.GetSubscriptionRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1456, + serialized_end=1502, +) + + +_UPDATESUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='UpdateSubscriptionRequest', + full_name='google.pubsub.v1.UpdateSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.UpdateSubscriptionRequest.subscription', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateSubscriptionRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, 
+ syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1505, + serialized_end=1635, +) + + +_LISTSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( + name='ListSubscriptionsRequest', + full_name='google.pubsub.v1.ListSubscriptionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListSubscriptionsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1637, + serialized_end=1719, +) + + +_LISTSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( + name='ListSubscriptionsResponse', + full_name='google.pubsub.v1.ListSubscriptionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscriptions', full_name='google.pubsub.v1.ListSubscriptionsResponse.subscriptions', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListSubscriptionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1721, + serialized_end=1828, +) + + +_DELETESUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='DeleteSubscriptionRequest', + full_name='google.pubsub.v1.DeleteSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.DeleteSubscriptionRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1830, + serialized_end=1879, +) + + +_MODIFYPUSHCONFIGREQUEST = _descriptor.Descriptor( + name='ModifyPushConfigRequest', + full_name='google.pubsub.v1.ModifyPushConfigRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.ModifyPushConfigRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='push_config', full_name='google.pubsub.v1.ModifyPushConfigRequest.push_config', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1881, + serialized_end=1979, +) + + +_PULLREQUEST = _descriptor.Descriptor( + name='PullRequest', + full_name='google.pubsub.v1.PullRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.PullRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_immediately', full_name='google.pubsub.v1.PullRequest.return_immediately', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_messages', full_name='google.pubsub.v1.PullRequest.max_messages', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=1981, + serialized_end=2066, +) + + +_PULLRESPONSE = _descriptor.Descriptor( + name='PullResponse', + full_name='google.pubsub.v1.PullResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='received_messages', full_name='google.pubsub.v1.PullResponse.received_messages', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2068, + serialized_end=2144, +) + + +_MODIFYACKDEADLINEREQUEST = _descriptor.Descriptor( + name='ModifyAckDeadlineRequest', + full_name='google.pubsub.v1.ModifyAckDeadlineRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids', index=1, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_deadline_seconds', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2146, + serialized_end=2241, +) + + +_ACKNOWLEDGEREQUEST = _descriptor.Descriptor( + name='AcknowledgeRequest', + full_name='google.pubsub.v1.AcknowledgeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.AcknowledgeRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.AcknowledgeRequest.ack_ids', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2243, + serialized_end=2302, +) + + +_STREAMINGPULLREQUEST = _descriptor.Descriptor( + name='StreamingPullRequest', + full_name='google.pubsub.v1.StreamingPullRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.StreamingPullRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', 
full_name='google.pubsub.v1.StreamingPullRequest.ack_ids', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='modify_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds', index=2, + number=3, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='modify_deadline_ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids', index=3, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream_ack_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2305, + serialized_end=2469, +) + + +_STREAMINGPULLRESPONSE = _descriptor.Descriptor( + name='StreamingPullResponse', + full_name='google.pubsub.v1.StreamingPullResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='received_messages', full_name='google.pubsub.v1.StreamingPullResponse.received_messages', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2471, + serialized_end=2556, +) + + +_CREATESNAPSHOTREQUEST = _descriptor.Descriptor( + name='CreateSnapshotRequest', + full_name='google.pubsub.v1.CreateSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.CreateSnapshotRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.CreateSnapshotRequest.subscription', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2558, + serialized_end=2617, +) + + +_SNAPSHOT = _descriptor.Descriptor( + name='Snapshot', + full_name='google.pubsub.v1.Snapshot', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Snapshot.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='topic', 
full_name='google.pubsub.v1.Snapshot.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='expire_time', full_name='google.pubsub.v1.Snapshot.expire_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2619, + serialized_end=2707, +) + + +_LISTSNAPSHOTSREQUEST = _descriptor.Descriptor( + name='ListSnapshotsRequest', + full_name='google.pubsub.v1.ListSnapshotsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListSnapshotsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListSnapshotsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListSnapshotsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2709, + serialized_end=2787, +) + + +_LISTSNAPSHOTSRESPONSE = _descriptor.Descriptor( + name='ListSnapshotsResponse', + full_name='google.pubsub.v1.ListSnapshotsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshots', full_name='google.pubsub.v1.ListSnapshotsResponse.snapshots', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListSnapshotsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2789, + serialized_end=2884, +) + + +_DELETESNAPSHOTREQUEST = _descriptor.Descriptor( + name='DeleteSnapshotRequest', + full_name='google.pubsub.v1.DeleteSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.DeleteSnapshotRequest.snapshot', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ 
+ ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2886, + serialized_end=2927, +) + + +_SEEKREQUEST = _descriptor.Descriptor( + name='SeekRequest', + full_name='google.pubsub.v1.SeekRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.SeekRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='time', full_name='google.pubsub.v1.SeekRequest.time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.SeekRequest.snapshot', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='target', full_name='google.pubsub.v1.SeekRequest.target', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2929, + serialized_end=3038, +) + + +_SEEKRESPONSE = _descriptor.Descriptor( + name='SeekResponse', + full_name='google.pubsub.v1.SeekResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=3040, + serialized_end=3054, +) + +_PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE +_PUBSUBMESSAGE.fields_by_name['attributes'].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY +_PUBSUBMESSAGE.fields_by_name['publish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_PUBLISHREQUEST.fields_by_name['messages'].message_type = _PUBSUBMESSAGE +_LISTTOPICSRESPONSE.fields_by_name['topics'].message_type = _TOPIC +_SUBSCRIPTION.fields_by_name['push_config'].message_type = _PUSHCONFIG +_SUBSCRIPTION.fields_by_name['message_retention_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG +_PUSHCONFIG.fields_by_name['attributes'].message_type = _PUSHCONFIG_ATTRIBUTESENTRY +_RECEIVEDMESSAGE.fields_by_name['message'].message_type = _PUBSUBMESSAGE +_UPDATESUBSCRIPTIONREQUEST.fields_by_name['subscription'].message_type = _SUBSCRIPTION +_UPDATESUBSCRIPTIONREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTSUBSCRIPTIONSRESPONSE.fields_by_name['subscriptions'].message_type = _SUBSCRIPTION +_MODIFYPUSHCONFIGREQUEST.fields_by_name['push_config'].message_type = _PUSHCONFIG +_PULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_STREAMINGPULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_SNAPSHOT.fields_by_name['expire_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LISTSNAPSHOTSRESPONSE.fields_by_name['snapshots'].message_type = _SNAPSHOT +_SEEKREQUEST.fields_by_name['time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SEEKREQUEST.oneofs_by_name['target'].fields.append( + _SEEKREQUEST.fields_by_name['time']) +_SEEKREQUEST.fields_by_name['time'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +_SEEKREQUEST.oneofs_by_name['target'].fields.append( 
+ _SEEKREQUEST.fields_by_name['snapshot']) +_SEEKREQUEST.fields_by_name['snapshot'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +DESCRIPTOR.message_types_by_name['Topic'] = _TOPIC +DESCRIPTOR.message_types_by_name['PubsubMessage'] = _PUBSUBMESSAGE +DESCRIPTOR.message_types_by_name['GetTopicRequest'] = _GETTOPICREQUEST +DESCRIPTOR.message_types_by_name['PublishRequest'] = _PUBLISHREQUEST +DESCRIPTOR.message_types_by_name['PublishResponse'] = _PUBLISHRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicsRequest'] = _LISTTOPICSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicsResponse'] = _LISTTOPICSRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsRequest'] = _LISTTOPICSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsResponse'] = _LISTTOPICSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteTopicRequest'] = _DELETETOPICREQUEST +DESCRIPTOR.message_types_by_name['Subscription'] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name['PushConfig'] = _PUSHCONFIG +DESCRIPTOR.message_types_by_name['ReceivedMessage'] = _RECEIVEDMESSAGE +DESCRIPTOR.message_types_by_name['GetSubscriptionRequest'] = _GETSUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['UpdateSubscriptionRequest'] = _UPDATESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['ListSubscriptionsRequest'] = _LISTSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name['ListSubscriptionsResponse'] = _LISTSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteSubscriptionRequest'] = _DELETESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['ModifyPushConfigRequest'] = _MODIFYPUSHCONFIGREQUEST +DESCRIPTOR.message_types_by_name['PullRequest'] = _PULLREQUEST +DESCRIPTOR.message_types_by_name['PullResponse'] = _PULLRESPONSE +DESCRIPTOR.message_types_by_name['ModifyAckDeadlineRequest'] = _MODIFYACKDEADLINEREQUEST +DESCRIPTOR.message_types_by_name['AcknowledgeRequest'] = _ACKNOWLEDGEREQUEST 
+DESCRIPTOR.message_types_by_name['StreamingPullRequest'] = _STREAMINGPULLREQUEST +DESCRIPTOR.message_types_by_name['StreamingPullResponse'] = _STREAMINGPULLRESPONSE +DESCRIPTOR.message_types_by_name['CreateSnapshotRequest'] = _CREATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT +DESCRIPTOR.message_types_by_name['ListSnapshotsRequest'] = _LISTSNAPSHOTSREQUEST +DESCRIPTOR.message_types_by_name['ListSnapshotsResponse'] = _LISTSNAPSHOTSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteSnapshotRequest'] = _DELETESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['SeekRequest'] = _SEEKREQUEST +DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE + +Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( + DESCRIPTOR = _TOPIC, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) + )) +_sym_db.RegisterMessage(Topic) + +PubsubMessage = _reflection.GeneratedProtocolMessageType('PubsubMessage', (_message.Message,), dict( + + AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( + DESCRIPTOR = _PUBSUBMESSAGE_ATTRIBUTESENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) + )) + , + DESCRIPTOR = _PUBSUBMESSAGE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) + )) +_sym_db.RegisterMessage(PubsubMessage) +_sym_db.RegisterMessage(PubsubMessage.AttributesEntry) + +GetTopicRequest = _reflection.GeneratedProtocolMessageType('GetTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _GETTOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) + )) +_sym_db.RegisterMessage(GetTopicRequest) + +PublishRequest = 
_reflection.GeneratedProtocolMessageType('PublishRequest', (_message.Message,), dict( + DESCRIPTOR = _PUBLISHREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) + )) +_sym_db.RegisterMessage(PublishRequest) + +PublishResponse = _reflection.GeneratedProtocolMessageType('PublishResponse', (_message.Message,), dict( + DESCRIPTOR = _PUBLISHRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) + )) +_sym_db.RegisterMessage(PublishResponse) + +ListTopicsRequest = _reflection.GeneratedProtocolMessageType('ListTopicsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) + )) +_sym_db.RegisterMessage(ListTopicsRequest) + +ListTopicsResponse = _reflection.GeneratedProtocolMessageType('ListTopicsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) + )) +_sym_db.RegisterMessage(ListTopicsResponse) + +ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) + )) +_sym_db.RegisterMessage(ListTopicSubscriptionsRequest) + +ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # 
@@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) + )) +_sym_db.RegisterMessage(ListTopicSubscriptionsResponse) + +DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETETOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) + )) +_sym_db.RegisterMessage(DeleteTopicRequest) + +Subscription = _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIPTION, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) + )) +_sym_db.RegisterMessage(Subscription) + +PushConfig = _reflection.GeneratedProtocolMessageType('PushConfig', (_message.Message,), dict( + + AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( + DESCRIPTOR = _PUSHCONFIG_ATTRIBUTESENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) + )) + , + DESCRIPTOR = _PUSHCONFIG, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) + )) +_sym_db.RegisterMessage(PushConfig) +_sym_db.RegisterMessage(PushConfig.AttributesEntry) + +ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), dict( + DESCRIPTOR = _RECEIVEDMESSAGE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) + )) +_sym_db.RegisterMessage(ReceivedMessage) + +GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType('GetSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _GETSUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # 
@@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) + )) +_sym_db.RegisterMessage(GetSubscriptionRequest) + +UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType('UpdateSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) + )) +_sym_db.RegisterMessage(UpdateSubscriptionRequest) + +ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListSubscriptionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSUBSCRIPTIONSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) + )) +_sym_db.RegisterMessage(ListSubscriptionsRequest) + +ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListSubscriptionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSUBSCRIPTIONSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) + )) +_sym_db.RegisterMessage(ListSubscriptionsResponse) + +DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType('DeleteSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETESUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) + )) +_sym_db.RegisterMessage(DeleteSubscriptionRequest) + +ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyPushConfigRequest', (_message.Message,), dict( + DESCRIPTOR = _MODIFYPUSHCONFIGREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) + )) +_sym_db.RegisterMessage(ModifyPushConfigRequest) + +PullRequest = 
_reflection.GeneratedProtocolMessageType('PullRequest', (_message.Message,), dict( + DESCRIPTOR = _PULLREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) + )) +_sym_db.RegisterMessage(PullRequest) + +PullResponse = _reflection.GeneratedProtocolMessageType('PullResponse', (_message.Message,), dict( + DESCRIPTOR = _PULLRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) + )) +_sym_db.RegisterMessage(PullResponse) + +ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType('ModifyAckDeadlineRequest', (_message.Message,), dict( + DESCRIPTOR = _MODIFYACKDEADLINEREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) + )) +_sym_db.RegisterMessage(ModifyAckDeadlineRequest) + +AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), dict( + DESCRIPTOR = _ACKNOWLEDGEREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) + )) +_sym_db.RegisterMessage(AcknowledgeRequest) + +StreamingPullRequest = _reflection.GeneratedProtocolMessageType('StreamingPullRequest', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGPULLREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) + )) +_sym_db.RegisterMessage(StreamingPullRequest) + +StreamingPullResponse = _reflection.GeneratedProtocolMessageType('StreamingPullResponse', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGPULLRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) + )) +_sym_db.RegisterMessage(StreamingPullResponse) + 
+CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) + )) +_sym_db.RegisterMessage(CreateSnapshotRequest) + +Snapshot = _reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict( + DESCRIPTOR = _SNAPSHOT, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) + )) +_sym_db.RegisterMessage(Snapshot) + +ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSNAPSHOTSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) + )) +_sym_db.RegisterMessage(ListSnapshotsRequest) + +ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListSnapshotsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSNAPSHOTSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) + )) +_sym_db.RegisterMessage(ListSnapshotsResponse) + +DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType('DeleteSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) + )) +_sym_db.RegisterMessage(DeleteSnapshotRequest) + +SeekRequest = _reflection.GeneratedProtocolMessageType('SeekRequest', (_message.Message,), dict( + DESCRIPTOR = _SEEKREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) + )) +_sym_db.RegisterMessage(SeekRequest) + +SeekResponse = 
_reflection.GeneratedProtocolMessageType('SeekResponse', (_message.Message,), dict( + DESCRIPTOR = _SEEKRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) + )) +_sym_db.RegisterMessage(SeekResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1')) +_PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True +_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_PUSHCONFIG_ATTRIBUTESENTRY.has_options = True +_PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class SubscriberStub(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=Subscription.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.GetSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=GetSubscriptionRequest.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.UpdateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=UpdateSubscriptionRequest.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.ListSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=ListSubscriptionsRequest.SerializeToString, + response_deserializer=ListSubscriptionsResponse.FromString, + ) + self.DeleteSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyAckDeadline = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Acknowledge = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Pull = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=PullRequest.SerializeToString, + response_deserializer=PullResponse.FromString, + ) + self.StreamingPull = channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=StreamingPullRequest.SerializeToString, + response_deserializer=StreamingPullResponse.FromString, + ) + 
self.ModifyPushConfig = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListSnapshots = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=ListSnapshotsRequest.SerializeToString, + response_deserializer=ListSnapshotsResponse.FromString, + ) + self.CreateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=CreateSnapshotRequest.SerializeToString, + response_deserializer=Snapshot.FromString, + ) + self.DeleteSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + request_serializer=DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Seek = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=SeekRequest.SerializeToString, + response_deserializer=SeekResponse.FromString, + ) + + + class SubscriberServicer(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. 
Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. 
After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_SubscriberServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=Subscription.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'GetSubscription': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=GetSubscriptionRequest.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=UpdateSubscriptionRequest.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=ListSubscriptionsRequest.FromString, + response_serializer=ListSubscriptionsResponse.SerializeToString, + ), + 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + request_deserializer=ModifyAckDeadlineRequest.FromString, + 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Acknowledge': grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + request_deserializer=AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Pull': grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=PullRequest.FromString, + response_serializer=PullResponse.SerializeToString, + ), + 'StreamingPull': grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=StreamingPullRequest.FromString, + response_serializer=StreamingPullResponse.SerializeToString, + ), + 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + request_deserializer=ListSnapshotsRequest.FromString, + response_serializer=ListSnapshotsResponse.SerializeToString, + ), + 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=CreateSnapshotRequest.FromString, + response_serializer=Snapshot.SerializeToString, + ), + 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Seek': grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=SeekRequest.FromString, + response_serializer=SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Subscriber', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class PublisherStub(object): + """The service that an application uses to manipulate topics, and to send + 
messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=Topic.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=PublishRequest.SerializeToString, + response_deserializer=PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=GetTopicRequest.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=ListTopicsRequest.SerializeToString, + response_deserializer=ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=ListTopicSubscriptionsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class PublisherServicer(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. 
The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=Topic.FromString, + response_serializer=Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=PublishRequest.FromString, + response_serializer=PublishResponse.SerializeToString, + ), + 'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=GetTopicRequest.FromString, + response_serializer=Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=ListTopicsRequest.FromString, + response_serializer=ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=ListTopicSubscriptionsRequest.FromString, + response_serializer=ListTopicSubscriptionsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + request_deserializer=DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaSubscriberServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. 
`UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaSubscriberStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + def CreateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + raise NotImplementedError() + CreateSubscription.future = None + def GetSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the configuration details of a subscription. + """ + raise NotImplementedError() + GetSubscription.future = None + def UpdateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. 
+ """ + raise NotImplementedError() + UpdateSubscription.future = None + def ListSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists matching subscriptions. + """ + raise NotImplementedError() + ListSubscriptions.future = None + def DeleteSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + raise NotImplementedError() + DeleteSubscription.future = None + def ModifyAckDeadline(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + raise NotImplementedError() + ModifyAckDeadline.future = None + def Acknowledge(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + raise NotImplementedError() + Acknowledge.future = None + def Pull(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Pulls messages from the server. 
Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + raise NotImplementedError() + Pull.future = None + def StreamingPull(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + raise NotImplementedError() + def ModifyPushConfig(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + raise NotImplementedError() + ModifyPushConfig.future = None + def ListSnapshots(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the existing snapshots. 
+ """ + raise NotImplementedError() + ListSnapshots.future = None + def CreateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + raise NotImplementedError() + CreateSnapshot.future = None + def DeleteSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + raise NotImplementedError() + DeleteSnapshot.future = None + def Seek(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + raise NotImplementedError() + Seek.future = None + + + def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.FromString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): ModifyPushConfigRequest.FromString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.FromString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.FromString, + } + response_serializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): 
ListSnapshotsResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.SerializeToString, + } + method_implementations = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): face_utilities.unary_unary_inline(servicer.Acknowledge), + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): face_utilities.unary_unary_inline(servicer.CreateSnapshot), + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): face_utilities.unary_unary_inline(servicer.CreateSubscription), + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): face_utilities.unary_unary_inline(servicer.DeleteSnapshot), + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): face_utilities.unary_unary_inline(servicer.DeleteSubscription), + ('google.pubsub.v1.Subscriber', 'GetSubscription'): face_utilities.unary_unary_inline(servicer.GetSubscription), + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): face_utilities.unary_unary_inline(servicer.ListSnapshots), + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): face_utilities.unary_unary_inline(servicer.ListSubscriptions), + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): face_utilities.unary_unary_inline(servicer.ModifyAckDeadline), + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): face_utilities.unary_unary_inline(servicer.ModifyPushConfig), + ('google.pubsub.v1.Subscriber', 'Pull'): face_utilities.unary_unary_inline(servicer.Pull), + 
('google.pubsub.v1.Subscriber', 'Seek'): face_utilities.unary_unary_inline(servicer.Seek), + ('google.pubsub.v1.Subscriber', 'StreamingPull'): face_utilities.stream_stream_inline(servicer.StreamingPull), + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): face_utilities.unary_unary_inline(servicer.UpdateSubscription), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): 
ModifyPushConfigRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.SerializeToString, + } + response_deserializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsResponse.FromString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.FromString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.FromString, + } + cardinalities = { + 'Acknowledge': cardinality.Cardinality.UNARY_UNARY, + 'CreateSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'CreateSubscription': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSubscription': cardinality.Cardinality.UNARY_UNARY, + 
'GetSubscription': cardinality.Cardinality.UNARY_UNARY, + 'ListSnapshots': cardinality.Cardinality.UNARY_UNARY, + 'ListSubscriptions': cardinality.Cardinality.UNARY_UNARY, + 'ModifyAckDeadline': cardinality.Cardinality.UNARY_UNARY, + 'ModifyPushConfig': cardinality.Cardinality.UNARY_UNARY, + 'Pull': cardinality.Cardinality.UNARY_UNARY, + 'Seek': cardinality.Cardinality.UNARY_UNARY, + 'StreamingPull': cardinality.Cardinality.STREAM_STREAM, + 'UpdateSubscription': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Subscriber', cardinalities, options=stub_options) + + + class BetaPublisherServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTopics(self, request, context): + """Lists matching topics. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaPublisherStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + def CreateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates the given topic with the given name. + """ + raise NotImplementedError() + CreateTopic.future = None + def Publish(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + raise NotImplementedError() + Publish.future = None + def GetTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the configuration of a topic. 
+ """ + raise NotImplementedError() + GetTopic.future = None + def ListTopics(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists matching topics. + """ + raise NotImplementedError() + ListTopics.future = None + def ListTopicSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the name of the subscriptions for this topic. + """ + raise NotImplementedError() + ListTopicSubscriptions.future = None + def DeleteTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + raise NotImplementedError() + DeleteTopic.future = None + + + def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.FromString, + ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.FromString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.FromString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.FromString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.FromString, + } + response_serializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.SerializeToString, + } + method_implementations = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): face_utilities.unary_unary_inline(servicer.CreateTopic), + ('google.pubsub.v1.Publisher', 'DeleteTopic'): face_utilities.unary_unary_inline(servicer.DeleteTopic), + ('google.pubsub.v1.Publisher', 'GetTopic'): face_utilities.unary_unary_inline(servicer.GetTopic), + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): face_utilities.unary_unary_inline(servicer.ListTopicSubscriptions), + ('google.pubsub.v1.Publisher', 'ListTopics'): face_utilities.unary_unary_inline(servicer.ListTopics), + ('google.pubsub.v1.Publisher', 'Publish'): face_utilities.unary_unary_inline(servicer.Publish), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, 
thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.SerializeToString, + } + response_deserializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.FromString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.FromString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.FromString, + } + cardinalities = { + 'CreateTopic': cardinality.Cardinality.UNARY_UNARY, + 'DeleteTopic': cardinality.Cardinality.UNARY_UNARY, + 'GetTopic': cardinality.Cardinality.UNARY_UNARY, + 'ListTopicSubscriptions': cardinality.Cardinality.UNARY_UNARY, + 'ListTopics': cardinality.Cardinality.UNARY_UNARY, + 'Publish': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = 
beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Publisher', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py new file mode 100644 index 000000000000..5a970cbc77ab --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py @@ -0,0 +1,461 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.pubsub.v1.pubsub_pb2 as google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class SubscriberStub(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.GetSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.UpdateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.ListSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, + ) + self.DeleteSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyAckDeadline = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + 
self.Acknowledge = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Pull = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.FromString, + ) + self.StreamingPull = channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.FromString, + ) + self.ModifyPushConfig = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListSnapshots = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.FromString, + ) + self.CreateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + ) + self.DeleteSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + 
request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Seek = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.FromString, + ) + + +class SubscriberServicer(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. 
Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SubscriberServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'GetSubscription': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, + ), + 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Acknowledge': grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Pull': grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.SerializeToString, + ), + 'StreamingPull': grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, + ), + 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, + ), + 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.FromString, + 
response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Seek': grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Subscriber', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class PublisherStub(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class PublisherServicer(object): + """The service that an application uses to 
manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.SerializeToString, + ), + 'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/pubsub/google/cloud/pubsub.py b/pubsub/google/cloud/pubsub.py new file mode 100644 index 000000000000..5e77fbced96f --- /dev/null +++ b/pubsub/google/cloud/pubsub.py @@ -0,0 +1,24 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import PublisherClient +from google.cloud.pubsub_v1 import SubscriberClient + + +__all__ = ( + 'PublisherClient', + 'SubscriberClient', +) diff --git a/pubsub/google/cloud/pubsub/__init__.py b/pubsub/google/cloud/pubsub/__init__.py deleted file mode 100644 index 070e8243bf2b..000000000000 --- a/pubsub/google/cloud/pubsub/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Google Cloud Pubsub API wrapper. - -The main concepts with this API are: - -- :class:`~google.cloud.pubsub.topic.Topic` represents an endpoint to which - messages can be published using the Cloud Storage Pubsub API. - -- :class:`~google.cloud.pubsub.subscription.Subscription` represents a named - subscription (either pull or push) to a topic. -""" - - -from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-pubsub').version - -from google.cloud.pubsub.client import Client -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -__all__ = ['__version__', 'Client', 'Subscription', 'Topic'] diff --git a/pubsub/google/cloud/pubsub/_gax.py b/pubsub/google/cloud/pubsub/_gax.py deleted file mode 100644 index 730192755221..000000000000 --- a/pubsub/google/cloud/pubsub/_gax.py +++ /dev/null @@ -1,796 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""GAX wrapper for Pubsub API requests.""" - -import functools - -from google.cloud.gapic.pubsub.v1.publisher_client import PublisherClient -from google.cloud.gapic.pubsub.v1.subscriber_client import SubscriberClient -from google.gax import CallOptions -from google.gax import INITIAL_PAGE -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from google.protobuf.json_format import MessageToDict -from google.cloud.proto.pubsub.v1.pubsub_pb2 import PubsubMessage -from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig -from grpc import insecure_channel -from grpc import StatusCode - -from google.cloud._helpers import _to_bytes -from google.cloud._helpers import _pb_timestamp_to_rfc3339 -from google.cloud._helpers import _timedelta_to_duration_pb -from google.cloud._helpers import make_secure_channel -from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud.exceptions import Conflict -from google.cloud.exceptions import NotFound -from google.cloud.iterator import GAXIterator -from google.cloud.pubsub import __version__ -from google.cloud.pubsub._helpers import subscription_name_from_path -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - - -class _PublisherAPI(object): - """Helper mapping publisher-related APIs. - - :type gax_api: :class:`.publisher_client.PublisherClient` - :param gax_api: API object used to make GAX requests. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gax_api, client): - self._gax_api = gax_api - self._client = client - - def list_topics(self, project, page_size=0, page_token=None): - """List topics for the project associated with this API. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_topics( - path, page_size=page_size, options=options) - return GAXIterator(self._client, page_iter, _item_to_topic) - - def topic_create(self, topic_path): - """API call: create a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - :type topic_path: str - :param topic_path: fully-qualified path of the new topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.Conflict` if the topic already - exists - """ - try: - topic_pb = self._gax_api.create_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - raise Conflict(topic_path) - raise - return {'name': topic_pb.name} - - def topic_get(self, topic_path): - """API call: retrieve a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. 
- :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist - """ - try: - topic_pb = self._gax_api.get_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - return {'name': topic_pb.name} - - def topic_delete(self, topic_path): - """API call: delete a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - """ - try: - self._gax_api.delete_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - - def topic_publish(self, topic_path, messages, timeout=30): - """API call: publish one or more messages to a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - - :type messages: list of dict - :param messages: messages to be published. - - :type timeout: int - :param timeout: (Optional) Timeout seconds. - - :rtype: list of string - :returns: list of opaque IDs for published messages. 
- :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist - """ - options = CallOptions(is_bundling=False, timeout=timeout) - message_pbs = [_message_pb_from_mapping(message) - for message in messages] - try: - result = self._gax_api.publish(topic_path, message_pbs, - options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - return result.message_ids - - def topic_list_subscriptions(self, topic, page_size=0, page_token=None): - """API call: list subscriptions bound to a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: The topic that owns the subscriptions. - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - :raises: :exc:`~google.cloud.exceptions.NotFound` if the topic does - not exist. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - topic_path = topic.full_name - try: - page_iter = self._gax_api.list_topic_subscriptions( - topic_path, page_size=page_size, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - - iterator = GAXIterator(self._client, page_iter, - _item_to_subscription_for_topic) - iterator.topic = topic - return iterator - - -class _SubscriberAPI(object): - """Helper mapping subscriber-related APIs. 
- - :type gax_api: :class:`.publisher_client.SubscriberClient` - :param gax_api: API object used to make GAX requests. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns this API object. - """ - def __init__(self, gax_api, client): - self._gax_api = gax_api - self._client = client - - def list_subscriptions(self, project, page_size=0, page_token=None): - """List subscriptions for the project associated with this API. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_subscriptions( - path, page_size=page_size, options=options) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Subscription.from_api_repr, they - # can be re-used by other subscriptions from the same topic. 
- topics = {} - item_to_value = functools.partial( - _item_to_sub_for_client, topics=topics) - return GAXIterator(self._client, page_iter, item_to_value) - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """API call: create a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic being - subscribed, in format - ``projects//topics/``. - - :type ack_deadline: int - :param ack_deadline: - (Optional) the deadline (in seconds) by which messages pulled from - the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. 
- """ - if push_endpoint is not None: - push_config = PushConfig(push_endpoint=push_endpoint) - else: - push_config = None - - if message_retention_duration is not None: - message_retention_duration = _timedelta_to_duration_pb( - message_retention_duration) - - try: - sub_pb = self._gax_api.create_subscription( - subscription_path, topic_path, - push_config=push_config, ack_deadline_seconds=ack_deadline, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - raise Conflict(topic_path) - raise - return MessageToDict(sub_pb) - - def subscription_get(self, subscription_path): - """API call: retrieve a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - try: - sub_pb = self._gax_api.get_subscription(subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - return MessageToDict(sub_pb) - - def subscription_delete(self, subscription_path): - """API call: delete a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. 
- """ - try: - self._gax_api.delete_subscription(subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_modify_push_config(self, subscription_path, - push_endpoint): - """API call: update push config of a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - """ - push_config = PushConfig(push_endpoint=push_endpoint) - try: - self._gax_api.modify_push_config(subscription_path, push_config) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_pull(self, subscription_path, return_immediately=False, - max_messages=1): - """API call: retrieve messages for a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to pull from, in - format ``projects//subscriptions/``. - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :rtype: list of dict - :returns: the ``receivedMessages`` element of the response. 
- """ - try: - response_pb = self._gax_api.pull( - subscription_path, max_messages, - return_immediately=return_immediately) - except GaxError as exc: - code = exc_to_code(exc.cause) - if code == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - elif code == StatusCode.DEADLINE_EXCEEDED: - # NOTE: The JSON-over-HTTP API returns a 200 with an empty - # response when ``return_immediately`` is ``False``, so - # we "mutate" the gRPC error into a non-error to conform. - if not return_immediately: - return [] - raise - return [_received_message_pb_to_mapping(rmpb) - for rmpb in response_pb.received_messages] - - def subscription_acknowledge(self, subscription_path, ack_ids): - """API call: acknowledge retrieved messages - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - """ - try: - self._gax_api.acknowledge(subscription_path, ack_ids) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - """API call: update ack deadline for retrieved messages - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. 
- """ - try: - self._gax_api.modify_ack_deadline( - subscription_path, ack_ids, ack_deadline) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - """API call: seek a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type subscription_path: str - :param subscription_path:: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type time: :class:`.timestamp_pb2.Timestamp` - :param time: The time to seek to. - - :type snapshot: str - :param snapshot: The snapshot to seek to. - """ - try: - self._gax_api.seek(subscription_path, time=time, snapshot=snapshot) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def list_snapshots(self, project, page_size=0, page_token=None): - """List snapshots for the project associated with this API. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. 
- """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_snapshots( - path, page_size=page_size, options=options) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Snapshot.from_api_repr, they - # can be re-used by other snapshots of the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_snapshot_for_client, topics=topics) - return GAXIterator(self._client, page_iter, item_to_value) - - def snapshot_create(self, snapshot_path, subscription_path): - """API call: create a snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :type subscription_path: str - :param subscription_path: fully-qualified path of the subscrption that - the new snapshot captures, in format - ``projects//subscription/``. - - :rtype: dict - :returns: ``Snapshot`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.Conflict` if the snapshot - already exists - :raises: :exc:`google.cloud.exceptions.NotFound` if the subscription - does not exist - """ - try: - snapshot_pb = self._gax_api.create_snapshot( - snapshot_path, subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - raise Conflict(snapshot_path) - elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - return MessageToDict(snapshot_pb) - - def snapshot_delete(self, snapshot_path): - """API call: delete a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. 
- - :raises: :exc:`google.cloud.exceptions.NotFound` if the snapshot does - not exist - """ - try: - self._gax_api.delete_snapshot(snapshot_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(snapshot_path) - raise - - -def _message_pb_from_mapping(message): - """Helper for :meth:`_PublisherAPI.topic_publish`. - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return PubsubMessage(data=_to_bytes(message['data']), - attributes=message['attributes']) - - -def _message_pb_to_mapping(message_pb): - """Helper for :meth:`pull`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return { - 'messageId': message_pb.message_id, - 'data': message_pb.data, - 'attributes': message_pb.attributes, - 'publishTime': _pb_timestamp_to_rfc3339(message_pb.publish_time), - } - - -def _received_message_pb_to_mapping(received_message_pb): - """Helper for :meth:`pull`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return { - 'ackId': received_message_pb.ack_id, - 'message': _message_pb_to_mapping( - received_message_pb.message), - } - - -def make_gax_publisher_api(credentials=None, host=None): - """Create an instance of the GAX Publisher API. - - If the ``credentials`` are omitted, then we create an insecure - ``channel`` pointing at the local Pub / Sub emulator. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) Credentials for getting access - tokens. - - :type host: str - :param host: (Optional) The host for an insecure channel. Only - used if ``credentials`` are omitted. - - :rtype: :class:`.publisher_client.PublisherClient` - :returns: A publisher API instance with the proper channel. 
- """ - if credentials is None: - channel = insecure_channel(host) - else: - channel = make_secure_channel( - credentials, DEFAULT_USER_AGENT, - PublisherClient.SERVICE_ADDRESS) - return PublisherClient( - channel=channel, lib_name='gccl', lib_version=__version__) - - -def make_gax_subscriber_api(credentials=None, host=None): - """Create an instance of the GAX Subscriber API. - - If the ``credentials`` are omitted, then we create an insecure - ``channel`` pointing at the local Pub / Sub emulator. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) Credentials for getting access - tokens. - - :type host: str - :param host: (Optional) The host for an insecure channel. Only - used if ``credentials`` are omitted. - - :rtype: :class:`.subscriber_client.SubscriberClient` - :returns: A subscriber API instance with the proper channel. - """ - if credentials is None: - channel = insecure_channel(host) - else: - channel = make_secure_channel( - credentials, DEFAULT_USER_AGENT, - SubscriberClient.SERVICE_ADDRESS) - return SubscriberClient( - channel=channel, lib_name='gccl', lib_version=__version__) - - -def _item_to_topic(iterator, resource): - """Convert a protobuf topic to the native object. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: :class:`.pubsub_pb2.Topic` - :param resource: A topic returned from the API. - - :rtype: :class:`~google.cloud.pubsub.topic.Topic` - :returns: The next topic in the page. - """ - return Topic.from_api_repr( - {'name': resource.name}, iterator.client) - - -def _item_to_subscription_for_topic(iterator, subscription_path): - """Convert a subscription name to the native object. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type subscription_path: str - :param subscription_path: Subscription path returned from the API. 
- - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - subscription_name = subscription_name_from_path( - subscription_path, iterator.client.project) - return Subscription(subscription_name, iterator.topic) - - -def _item_to_sub_for_client(iterator, sub_pb, topics): - """Convert a subscription protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_subscriptions`. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type sub_pb: :class:`.pubsub_pb2.Subscription` - :param sub_pb: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - resource = MessageToDict(sub_pb) - return Subscription.from_api_repr( - resource, iterator.client, topics=topics) - - -def _item_to_snapshot_for_client(iterator, snapshot_pb, topics): - """Convert a subscription protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_snapshots`. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. 
- - :type sub_pb: :class:`.pubsub_pb2.Snapshot` - :param sub_pb: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - resource = MessageToDict(snapshot_pb) - return Snapshot.from_api_repr( - resource, iterator.client, topics=topics) diff --git a/pubsub/google/cloud/pubsub/_helpers.py b/pubsub/google/cloud/pubsub/_helpers.py deleted file mode 100644 index 2f021f20ab3e..000000000000 --- a/pubsub/google/cloud/pubsub/_helpers.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helper functions for shared behavior.""" - -import re - -from google.cloud._helpers import _name_from_project_path - - -_TOPIC_TEMPLATE = re.compile(r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /topics/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) - - -_SUBSCRIPTION_TEMPLATE = re.compile(r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /subscriptions/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) - - -def topic_name_from_path(path, project): - """Validate a topic URI path and get the topic name. 
- - :type path: str - :param path: URI path for a topic API request. - - :type project: str - :param project: The project associated with the request. It is - included for validation purposes. - - :rtype: str - :returns: Topic name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. - """ - return _name_from_project_path(path, project, _TOPIC_TEMPLATE) - - -def subscription_name_from_path(path, project): - """Validate a subscription URI path and get the subscription name. - - :type path: str - :param path: URI path for a subscription API request. - - :type project: str - :param project: The project associated with the request. It is - included for validation purposes. - - :rtype: str - :returns: subscription name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. - """ - return _name_from_project_path(path, project, _SUBSCRIPTION_TEMPLATE) diff --git a/pubsub/google/cloud/pubsub/_http.py b/pubsub/google/cloud/pubsub/_http.py deleted file mode 100644 index 0c059df7453a..000000000000 --- a/pubsub/google/cloud/pubsub/_http.py +++ /dev/null @@ -1,782 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Interact with Google Cloud Pub/Sub via JSON-over-HTTP.""" - -import base64 -import copy -import functools -import os - -from google.cloud import _http -from google.cloud._helpers import _timedelta_to_duration_pb -from google.cloud.environment_vars import PUBSUB_EMULATOR -from google.cloud.iterator import HTTPIterator - -from google.cloud.pubsub import __version__ -from google.cloud.pubsub._helpers import subscription_name_from_path -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - - -PUBSUB_API_HOST = 'pubsub.googleapis.com' -"""Pub / Sub API request host.""" - -_CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) - - -class Connection(_http.JSONConnection): - """A connection to Google Cloud Pub/Sub via the JSON REST API. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns the current connection. - """ - - API_BASE_URL = 'https://' + PUBSUB_API_HOST - """The base of the API call URL.""" - - API_VERSION = 'v1' - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' - """A template for the URL of a particular API call.""" - - _EXTRA_HEADERS = { - _http.CLIENT_INFO_HEADER: _CLIENT_INFO, - } - - def __init__(self, client): - super(Connection, self).__init__(client) - emulator_host = os.getenv(PUBSUB_EMULATOR) - if emulator_host is None: - self.host = self.__class__.API_BASE_URL - self.api_base_url = self.__class__.API_BASE_URL - self.in_emulator = False - else: - self.host = emulator_host - self.api_base_url = 'http://' + emulator_host - self.in_emulator = True - - def build_api_url(self, path, query_params=None, - api_base_url=None, api_version=None): - """Construct an API url given a few components, some optional. - - Typically, you shouldn't need to use this method. - - :type path: str - :param path: The path to the resource. 
- - :type query_params: dict or list - :param query_params: A dictionary of keys and values (or list of - key-value pairs) to insert into the query - string of the URL. - - :type api_base_url: str - :param api_base_url: The base URL for the API endpoint. - Typically you won't have to provide this. - - :type api_version: str - :param api_version: The version of the API to call. - Typically you shouldn't provide this and instead - use the default for the library. - - :rtype: str - :returns: The URL assembled from the pieces provided. - """ - if api_base_url is None: - api_base_url = self.api_base_url - return super(Connection, self.__class__).build_api_url( - path, query_params=query_params, - api_base_url=api_base_url, api_version=api_version) - - -class _PublisherAPI(object): - """Helper mapping publisher-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_topics(self, project, page_size=None, page_token=None): - """API call: list topics for a given project - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current client. 
- """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/topics' % (project,) - - return HTTPIterator( - client=self._client, path=path, item_to_value=_item_to_topic, - items_key='topics', page_token=page_token, - extra_params=extra_params) - - def topic_create(self, topic_path): - """API call: create a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - :type topic_path: str - :param topic_path: the fully-qualified path of the new topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - """ - return self.api_request(method='PUT', path='/%s' % (topic_path,)) - - def topic_get(self, topic_path): - """API call: retrieve a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - """ - return self.api_request(method='GET', path='/%s' % (topic_path,)) - - def topic_delete(self, topic_path): - """API call: delete a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - """ - self.api_request(method='DELETE', path='/%s' % (topic_path,)) - - def topic_publish(self, topic_path, messages): - """API call: publish one or more messages to a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - - :type messages: list of dict - :param messages: messages to be published. - - :rtype: list of string - :returns: list of opaque IDs for published messages. 
- """ - messages_to_send = copy.deepcopy(messages) - _transform_messages_base64(messages_to_send, _base64_unicode) - data = {'messages': messages_to_send} - response = self.api_request( - method='POST', path='/%s:publish' % (topic_path,), data=data) - return response['messageIds'] - - def topic_list_subscriptions(self, topic, page_size=None, page_token=None): - """API call: list subscriptions bound to a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: The topic that owns the subscriptions. - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: list of strings - :returns: fully-qualified names of subscriptions for the supplied - topic. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/%s/subscriptions' % (topic.full_name,) - - iterator = HTTPIterator( - client=self._client, path=path, - item_to_value=_item_to_subscription_for_topic, - items_key='subscriptions', - page_token=page_token, extra_params=extra_params) - iterator.topic = topic - return iterator - - -class _SubscriberAPI(object): - """Helper mapping subscriber-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. 
- """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_subscriptions(self, project, page_size=None, page_token=None): - """API call: list subscriptions for a given project - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/subscriptions' % (project,) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Subscription.from_api_repr, they - # can be re-used by other subscriptions from the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_sub_for_client, topics=topics) - return HTTPIterator( - client=self._client, path=path, item_to_value=item_to_value, - items_key='subscriptions', page_token=page_token, - extra_params=extra_params) - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """API call: create a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type topic_path: str - :param topic_path: the fully-qualified path of the topic being - subscribed, in format - ``projects//topics/``. - - :type ack_deadline: int - :param ack_deadline: - (Optional) the deadline (in seconds) by which messages pulled from - the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. 
- """ - path = '/%s' % (subscription_path,) - resource = {'topic': topic_path} - - if ack_deadline is not None: - resource['ackDeadlineSeconds'] = ack_deadline - - if push_endpoint is not None: - resource['pushConfig'] = {'pushEndpoint': push_endpoint} - - if retain_acked_messages is not None: - resource['retainAckedMessages'] = retain_acked_messages - - if message_retention_duration is not None: - pb = _timedelta_to_duration_pb(message_retention_duration) - resource['messageRetentionDuration'] = { - 'seconds': pb.seconds, - 'nanos': pb.nanos - } - - return self.api_request(method='PUT', path=path, data=resource) - - def subscription_get(self, subscription_path): - """API call: retrieve a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - path = '/%s' % (subscription_path,) - return self.api_request(method='GET', path=path) - - def subscription_delete(self, subscription_path): - """API call: delete a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - """ - path = '/%s' % (subscription_path,) - self.api_request(method='DELETE', path=path) - - def subscription_modify_push_config(self, subscription_path, - push_endpoint): - """API call: update push config of a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - """ - path = '/%s:modifyPushConfig' % (subscription_path,) - resource = {'pushConfig': {'pushEndpoint': push_endpoint}} - self.api_request(method='POST', path=path, data=resource) - - def subscription_pull(self, subscription_path, return_immediately=False, - max_messages=1): - """API call: retrieve messages for a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :rtype: list of dict - :returns: the ``receivedMessages`` element of the response. - """ - path = '/%s:pull' % (subscription_path,) - data = { - 'returnImmediately': return_immediately, - 'maxMessages': max_messages, - } - response = self.api_request(method='POST', path=path, data=data) - messages = response.get('receivedMessages', ()) - _transform_messages_base64(messages, base64.b64decode, 'message') - return messages - - def subscription_acknowledge(self, subscription_path, ack_ids): - """API call: acknowledge retrieved messages - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - """ - path = '/%s:acknowledge' % (subscription_path,) - data = { - 'ackIds': ack_ids, - } - self.api_request(method='POST', path=path, data=data) - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - """API call: update ack deadline for retrieved messages - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - """ - path = '/%s:modifyAckDeadline' % (subscription_path,) - data = { - 'ackIds': ack_ids, - 'ackDeadlineSeconds': ack_deadline, - } - self.api_request(method='POST', path=path, data=data) - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - """API call: seek a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type subscription_path: str - :param subscription_path:: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type time: str - :param time: The time to seek to, in RFC 3339 format. - - :type snapshot: str - :param snapshot: The snapshot to seek to. - """ - path = '/%s:seek' % (subscription_path,) - data = {} - if time is not None: - data['time'] = time - if snapshot is not None: - data['snapshot'] = snapshot - self.api_request(method='POST', path=path, data=data) - - def list_snapshots(self, project, page_size=None, page_token=None): - """List snapshots for the project associated with this API. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/snapshots' % (project,) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Snapshot.from_api_repr, they - # can be re-used by other snapshots of the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_snapshot_for_client, topics=topics) - return HTTPIterator( - client=self._client, path=path, item_to_value=item_to_value, - items_key='snapshots', page_token=page_token, - extra_params=extra_params) - - def snapshot_create(self, snapshot_path, subscription_path): - """API call: create a snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :type subscription_path: str - :param subscription_path: fully-qualified path of the subscrption that - the new snapshot captures, in format - ``projects//subscription/``. - - :rtype: dict - :returns: ``Snapshot`` resource returned from the API. 
- """ - path = '/%s' % (snapshot_path,) - data = {'subscription': subscription_path} - return self.api_request(method='PUT', path=path, data=data) - - def snapshot_delete(self, snapshot_path): - """API call: delete a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - """ - path = '/%s' % (snapshot_path,) - self.api_request(method='DELETE', path=path) - - -class _IAMPolicyAPI(object): - """Helper mapping IAM policy-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self.api_request = client._connection.api_request - - def get_iam_policy(self, target_path): - """API call: fetch the IAM policy for the target - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - - :type target_path: str - :param target_path: the path of the target object. - - :rtype: dict - :returns: the resource returned by the ``getIamPolicy`` API request. - """ - path = '/%s:getIamPolicy' % (target_path,) - return self.api_request(method='GET', path=path) - - def set_iam_policy(self, target_path, policy): - """API call: update the IAM policy for the target - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - - :type target_path: str - :param target_path: the path of the target object. - - :type policy: dict - :param policy: the new policy resource. - - :rtype: dict - :returns: the resource returned by the ``setIamPolicy`` API request. 
- """ - wrapped = {'policy': policy} - path = '/%s:setIamPolicy' % (target_path,) - return self.api_request(method='POST', path=path, data=wrapped) - - def test_iam_permissions(self, target_path, permissions): - """API call: test permissions - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - - :type target_path: str - :param target_path: the path of the target object. - - :type permissions: list of string - :param permissions: the permissions to check - - :rtype: dict - :returns: the resource returned by the ``getIamPolicy`` API request. - """ - wrapped = {'permissions': permissions} - path = '/%s:testIamPermissions' % (target_path,) - resp = self.api_request(method='POST', path=path, data=wrapped) - return resp.get('permissions', []) - - -def _base64_unicode(value): - """Helper to base64 encode and make JSON serializable. - - :type value: str - :param value: String value to be base64 encoded and made serializable. - - :rtype: str - :returns: Base64 encoded string/unicode value. - """ - as_bytes = base64.b64encode(value) - return as_bytes.decode('ascii') - - -def _transform_messages_base64(messages, transform, key=None): - """Helper for base64 encoding and decoding messages. - - :type messages: list - :param messages: List of dictionaries with message data. - - :type transform: :class:`~types.FunctionType` - :param transform: Function to encode/decode the message data. - - :type key: str - :param key: Index to access messages. - """ - for message in messages: - if key is not None: - message = message[key] - if 'data' in message: - message['data'] = transform(message['data']) - - -def _item_to_topic(iterator, resource): - """Convert a JSON topic to the native object. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. 
- - :type resource: dict - :param resource: A topic returned from the API. - - :rtype: :class:`~google.cloud.pubsub.topic.Topic` - :returns: The next topic in the page. - """ - return Topic.from_api_repr(resource, iterator.client) - - -def _item_to_subscription_for_topic(iterator, subscription_path): - """Convert a subscription name to the native object. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type subscription_path: str - :param subscription_path: Subscription path returned from the API. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - subscription_name = subscription_name_from_path( - subscription_path, iterator.client.project) - return Subscription(subscription_name, iterator.topic) - - -def _item_to_sub_for_client(iterator, resource, topics): - """Convert a subscription to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_subscriptions`. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - return Subscription.from_api_repr( - resource, iterator.client, topics=topics) - - -def _item_to_snapshot_for_client(iterator, resource, topics): - """Convert a subscription to the native object. - - .. 
note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_snapshots`. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - return Snapshot.from_api_repr( - resource, iterator.client, topics=topics) diff --git a/pubsub/google/cloud/pubsub/client.py b/pubsub/google/cloud/pubsub/client.py deleted file mode 100644 index 1df95a2400de..000000000000 --- a/pubsub/google/cloud/pubsub/client.py +++ /dev/null @@ -1,283 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Client for interacting with the Google Cloud Pub/Sub API.""" - -import os - -from google.cloud.client import ClientWithProject -from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.pubsub._http import Connection -from google.cloud.pubsub._http import _PublisherAPI as JSONPublisherAPI -from google.cloud.pubsub._http import _SubscriberAPI as JSONSubscriberAPI -from google.cloud.pubsub._http import _IAMPolicyAPI -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -try: - from google.cloud.pubsub._gax import _PublisherAPI as GAXPublisherAPI - from google.cloud.pubsub._gax import _SubscriberAPI as GAXSubscriberAPI - from google.cloud.pubsub._gax import make_gax_publisher_api - from google.cloud.pubsub._gax import make_gax_subscriber_api -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - GAXPublisherAPI = None - GAXSubscriberAPI = None - make_gax_publisher_api = None - make_gax_subscriber_api = None -else: - _HAVE_GRPC = True - - -_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) -_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC - - -class Client(ClientWithProject): - """Client to bundle configuration needed for API requests. - - :type project: str - :param project: the project which the client acts on behalf of. Will be - passed when creating a topic. If not passed, - falls back to the default inferred from the environment. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``_http`` object is - passed), falls back to the default inferred from the - environment. - - :type _http: :class:`~httplib2.Http` - :param _http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. 
If not passed, an - ``_http`` object is created that is bound to the - ``credentials`` for the current object. - This parameter should be considered private, and could - change in the future. - - :type _use_grpc: bool - :param _use_grpc: (Optional) Explicitly specifies whether - to use the gRPC transport (via GAX) or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` - environment variable. - This parameter should be considered private, and could - change in the future. - """ - - _publisher_api = None - _subscriber_api = None - _iam_policy_api = None - - SCOPE = ('https://www.googleapis.com/auth/pubsub', - 'https://www.googleapis.com/auth/cloud-platform') - """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" - - def __init__(self, project=None, credentials=None, - _http=None, _use_grpc=None): - super(Client, self).__init__( - project=project, credentials=credentials, _http=_http) - self._connection = Connection(self) - if _use_grpc is None: - self._use_grpc = _USE_GRPC - else: - self._use_grpc = _use_grpc - - @property - def publisher_api(self): - """Helper for publisher-related API calls.""" - if self._publisher_api is None: - if self._use_grpc: - if self._connection.in_emulator: - generated = make_gax_publisher_api( - host=self._connection.host) - else: - generated = make_gax_publisher_api( - credentials=self._credentials) - self._publisher_api = GAXPublisherAPI(generated, self) - else: - self._publisher_api = JSONPublisherAPI(self) - return self._publisher_api - - @property - def subscriber_api(self): - """Helper for subscriber-related API calls.""" - if self._subscriber_api is None: - if self._use_grpc: - if self._connection.in_emulator: - generated = make_gax_subscriber_api( - host=self._connection.host) - else: - generated = make_gax_subscriber_api( - credentials=self._credentials) - self._subscriber_api = GAXSubscriberAPI(generated, self) - else: - self._subscriber_api = JSONSubscriberAPI(self) - return 
self._subscriber_api - - @property - def iam_policy_api(self): - """Helper for IAM policy-related API calls.""" - if self._iam_policy_api is None: - self._iam_policy_api = _IAMPolicyAPI(self) - return self._iam_policy_api - - def list_topics(self, page_size=None, page_token=None): - """List topics for the project associated with this client. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START client_list_topics] - :end-before: [END client_list_topics] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current API. - """ - api = self.publisher_api - return api.list_topics( - self.project, page_size, page_token) - - def list_subscriptions(self, page_size=None, page_token=None): - """List subscriptions for the project associated with this client. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START client_list_subscriptions] - :end-before: [END client_list_subscriptions] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current client. 
- """ - api = self.subscriber_api - return api.list_subscriptions( - self.project, page_size, page_token) - - def list_snapshots(self, page_size=None, page_token=None): - """List snapshots for the project associated with this API. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - api = self.subscriber_api - return api.list_snapshots( - self.project, page_size, page_token) - - def topic(self, name, timestamp_messages=False): - """Creates a topic bound to the current client. - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START client_topic] - :end-before: [END client_topic] - - :type name: str - :param name: the name of the topic to be constructed. - - :type timestamp_messages: bool - :param timestamp_messages: To be passed to ``Topic`` constructor. - - :rtype: :class:`google.cloud.pubsub.topic.Topic` - :returns: Topic created with the current client. - """ - return Topic(name, client=self, timestamp_messages=timestamp_messages) - - def subscription(self, name, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """Creates a subscription bound to the current client. - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START client_subscription] - :end-before: [END client_subscription] - - :type name: str - :param name: the name of the subscription to be constructed. 
- - :type ack_deadline: int - :param ack_deadline: (Optional) The deadline (in seconds) by which - messages pulledfrom the back-end must be - acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by ``message_retention_duration``. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by ``message_retention_duration``. If unset, defaults to 7 days. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: Subscription created with the current client. - """ - return Subscription( - name, ack_deadline=ack_deadline, push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, client=self) diff --git a/pubsub/google/cloud/pubsub/iam.py b/pubsub/google/cloud/pubsub/iam.py deleted file mode 100644 index 9c7e46af222a..000000000000 --- a/pubsub/google/cloud/pubsub/iam.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""PubSub API IAM policy definitions - -For allowed roles / permissions, see: -https://cloud.google.com/pubsub/access_control#permissions -""" - -import warnings - -# pylint: disable=unused-import -from google.cloud.iam import OWNER_ROLE # noqa - backward compat -from google.cloud.iam import EDITOR_ROLE # noqa - backward compat -from google.cloud.iam import VIEWER_ROLE # noqa - backward compat -# pylint: enable=unused-import -from google.cloud.iam import Policy as _BasePolicy -from google.cloud.iam import _ASSIGNMENT_DEPRECATED_MSG - -# Pubsub-specific IAM roles - -PUBSUB_ADMIN_ROLE = 'roles/pubsub.admin' -"""Role implying all rights to an object.""" - -PUBSUB_EDITOR_ROLE = 'roles/pubsub.editor' -"""Role implying rights to modify an object.""" - -PUBSUB_VIEWER_ROLE = 'roles/pubsub.viewer' -"""Role implying rights to access an object.""" - -PUBSUB_PUBLISHER_ROLE = 'roles/pubsub.publisher' -"""Role implying rights to publish to a topic.""" - -PUBSUB_SUBSCRIBER_ROLE = 'roles/pubsub.subscriber' -"""Role implying rights to subscribe to a topic.""" - - -# Pubsub-specific permissions - -PUBSUB_TOPICS_CONSUME = 'pubsub.topics.consume' -"""Permission: consume events from a subscription.""" - -PUBSUB_TOPICS_CREATE = 'pubsub.topics.create' -"""Permission: create topics.""" - -PUBSUB_TOPICS_DELETE = 'pubsub.topics.delete' -"""Permission: delete topics.""" - -PUBSUB_TOPICS_GET = 'pubsub.topics.get' -"""Permission: retrieve topics.""" - -PUBSUB_TOPICS_GET_IAM_POLICY = 'pubsub.topics.getIamPolicy' -"""Permission: retrieve subscription IAM policies.""" - -PUBSUB_TOPICS_LIST = 'pubsub.topics.list' -"""Permission: list topics.""" - -PUBSUB_TOPICS_SET_IAM_POLICY = 'pubsub.topics.setIamPolicy' -"""Permission: update subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_CONSUME = 'pubsub.subscriptions.consume' -"""Permission: consume events from a subscription.""" - 
-PUBSUB_SUBSCRIPTIONS_CREATE = 'pubsub.subscriptions.create' -"""Permission: create subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_DELETE = 'pubsub.subscriptions.delete' -"""Permission: delete subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_GET = 'pubsub.subscriptions.get' -"""Permission: retrieve subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY = 'pubsub.subscriptions.getIamPolicy' -"""Permission: retrieve subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_LIST = 'pubsub.subscriptions.list' -"""Permission: list subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_SET_IAM_POLICY = 'pubsub.subscriptions.setIamPolicy' -"""Permission: update subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_UPDATE = 'pubsub.subscriptions.update' -"""Permission: update subscriptions.""" - - -class Policy(_BasePolicy): - """IAM Policy / Bindings. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy - https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Binding - """ - _OWNER_ROLES = (OWNER_ROLE, PUBSUB_ADMIN_ROLE) - """Roles mapped onto our ``owners`` attribute.""" - - _EDITOR_ROLES = (EDITOR_ROLE, PUBSUB_EDITOR_ROLE) - """Roles mapped onto our ``editors`` attribute.""" - - _VIEWER_ROLES = (VIEWER_ROLE, PUBSUB_VIEWER_ROLE) - """Roles mapped onto our ``viewers`` attribute.""" - - @property - def publishers(self): - """Legacy access to owner role.""" - return frozenset(self._bindings.get(PUBSUB_PUBLISHER_ROLE, ())) - - @publishers.setter - def publishers(self, value): - """Update publishers.""" - warnings.warn( - _ASSIGNMENT_DEPRECATED_MSG.format( - 'publishers', PUBSUB_PUBLISHER_ROLE), - DeprecationWarning) - self[PUBSUB_PUBLISHER_ROLE] = value - - @property - def subscribers(self): - """Legacy access to owner role.""" - return frozenset(self._bindings.get(PUBSUB_SUBSCRIBER_ROLE, ())) - - @subscribers.setter - def subscribers(self, value): - """Update subscribers.""" - warnings.warn( - _ASSIGNMENT_DEPRECATED_MSG.format( - 'subscribers', 
PUBSUB_SUBSCRIBER_ROLE), - DeprecationWarning) - self[PUBSUB_SUBSCRIBER_ROLE] = value diff --git a/pubsub/google/cloud/pubsub/message.py b/pubsub/google/cloud/pubsub/message.py deleted file mode 100644 index 6b93e3b890ed..000000000000 --- a/pubsub/google/cloud/pubsub/message.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Topics.""" - -from google.cloud._helpers import _rfc3339_to_datetime - - -class Message(object): - """Messages can be published to a topic and received by subscribers. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage - - :type data: bytes - :param data: the payload of the message. - - :type message_id: str - :param message_id: An ID assigned to the message by the API. - - :type attributes: dict - :param attributes: - (Optional) Extra metadata associated by the publisher with the message. - """ - _service_timestamp = None - - def __init__(self, data, message_id, attributes=None): - self.data = data - self.message_id = message_id - self._attributes = attributes - - @property - def attributes(self): - """Lazily-constructed attribute dictionary.""" - if self._attributes is None: - self._attributes = {} - return self._attributes - - @property - def timestamp(self): - """Return sortable timestamp from attributes, if passed. - - Allows sorting messages in publication order (assuming consistent - clocks across all publishers). 
- - :rtype: :class:`datetime.datetime` - :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp - :raises: ValueError if timestamp not in ``attributes``, or if it does - not match the RFC 3339 format. - """ - stamp = self.attributes.get('timestamp') - if stamp is None: - raise ValueError('No timestamp') - return _rfc3339_to_datetime(stamp) - - @property - def service_timestamp(self): - """Return server-set timestamp. - - :rtype: str - :returns: timestamp (in UTC timezone) in RFC 3339 format - """ - return self._service_timestamp - - @classmethod - def from_api_repr(cls, api_repr): - """Factory: construct message from API representation. - - :type api_repr: dict - :param api_repr: (Optional) The API representation of the message - - :rtype: :class:`Message` - :returns: The message created from the response. - """ - data = api_repr.get('data', b'') - instance = cls( - data=data, message_id=api_repr['messageId'], - attributes=api_repr.get('attributes')) - instance._service_timestamp = api_repr.get('publishTime') - return instance diff --git a/pubsub/google/cloud/pubsub/snapshot.py b/pubsub/google/cloud/pubsub/snapshot.py deleted file mode 100644 index 557ea93818d6..000000000000 --- a/pubsub/google/cloud/pubsub/snapshot.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Define API Snapshots.""" - -from google.cloud.pubsub._helpers import topic_name_from_path - - -class Snapshot(object): - - _DELETED_TOPIC_PATH = '_deleted-topic_' - """Value of ``projects.snapshots.topic`` when topic has been deleted.""" - - def __init__(self, name, subscription=None, topic=None, client=None): - - num_kwargs = len( - [param for param in (subscription, topic, client) if param]) - if num_kwargs != 1: - raise TypeError( - "Pass only one of 'subscription', 'topic', 'client'.") - - self.name = name - self.topic = topic or getattr(subscription, 'topic', None) - self._subscription = subscription - self._client = client or getattr( - subscription, '_client', None) or topic._client - - @classmethod - def from_api_repr(cls, resource, client, topics=None): - """Factory: construct a subscription given its API representation - - :type resource: dict - :param resource: snapshot resource representation returned from the - API. - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration. - - :type subscriptions: dict - :param subscriptions: - (Optional) A Subscription to which this snapshot belongs. If not - passed, the subscription will have a newly-created subscription. - Must have the same topic as the snapshot. - - :rtype: :class:`google.cloud.pubsub.subscription.Subscription` - :returns: Subscription parsed from ``resource``. - """ - if topics is None: - topics = {} - topic_path = resource['topic'] - if topic_path == cls._DELETED_TOPIC_PATH: - topic = None - else: - topic = topics.get(topic_path) - if topic is None: - # NOTE: This duplicates behavior from Topic.from_api_repr to - # avoid an import cycle. 
- topic_name = topic_name_from_path(topic_path, client.project) - topic = topics[topic_path] = client.topic(topic_name) - _, _, _, name = resource['name'].split('/') - if topic is None: - return cls(name, client=client) - return cls(name, topic=topic) - - @property - def project(self): - """Project bound to the subscription.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in subscription APIs""" - return 'projects/%s/snapshots/%s' % (self.project, self.name) - - @property - def path(self): - """URL path for the subscription's APIs""" - return '/%s' % (self.full_name,) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic of the - current subscription. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. 
- """ - if not self._subscription: - raise RuntimeError( - 'Cannot create a snapshot not bound to a subscription') - - client = self._require_client(client) - api = client.subscriber_api - api.snapshot_create(self.full_name, self._subscription.full_name) - - def delete(self, client=None): - """API call: delete the snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.snapshot_delete(self.full_name) diff --git a/pubsub/google/cloud/pubsub/subscription.py b/pubsub/google/cloud/pubsub/subscription.py deleted file mode 100644 index 538913cca33e..000000000000 --- a/pubsub/google/cloud/pubsub/subscription.py +++ /dev/null @@ -1,590 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Define API Subscriptions.""" - -import datetime - -from google.cloud.exceptions import NotFound -from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub._helpers import topic_name_from_path -from google.cloud.pubsub.iam import Policy -from google.cloud.pubsub.message import Message - - -class Subscription(object): - """Subscriptions receive messages published to their topics. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions - - :type name: str - :param name: the name of the subscription. - - :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: - (Optional) the topic to which the subscription belongs; if ``None``, - the subscription's topic has been deleted. - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. If - not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: - (Optional) The client to use. If not passed, falls back to the - ``client`` stored on the topic. - """ - - _DELETED_TOPIC_PATH = '_deleted-topic_' - """Value of ``projects.subscriptions.topic`` when topic has been deleted. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions#Subscription.FIELDS.topic - """ - - def __init__(self, name, topic=None, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, message_retention_duration=None, - client=None): - - if client is None and topic is None: - raise TypeError("Pass only one of 'topic' or 'client'.") - - if client is not None and topic is not None: - raise TypeError("Pass only one of 'topic' or 'client'.") - - self.name = name - self.topic = topic - self._client = client or topic._client - self.ack_deadline = ack_deadline - self.push_endpoint = push_endpoint - self.retain_acked_messages = retain_acked_messages - self.message_retention_duration = message_retention_duration - - @classmethod - def from_api_repr(cls, resource, client, topics=None): - """Factory: construct a topic given its API representation - - :type resource: dict - :param resource: topic resource representation returned from the API. - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration for a topic. - - :type topics: dict - :param topics: - (Optional) A mapping of topic names -> topics. If not passed, the - subscription will have a newly-created topic. - - :rtype: :class:`google.cloud.pubsub.subscription.Subscription` - :returns: Subscription parsed from ``resource``. - """ - if topics is None: - topics = {} - topic_path = resource['topic'] - if topic_path == cls._DELETED_TOPIC_PATH: - topic = None - else: - topic = topics.get(topic_path) - if topic is None: - # NOTE: This duplicates behavior from Topic.from_api_repr to - # avoid an import cycle. 
- topic_name = topic_name_from_path(topic_path, client.project) - topic = topics[topic_path] = client.topic(topic_name) - _, _, _, name = resource['name'].split('/') - ack_deadline = resource.get('ackDeadlineSeconds') - push_config = resource.get('pushConfig', {}) - push_endpoint = push_config.get('pushEndpoint') - retain_acked_messages = resource.get('retainAckedMessages') - resource_duration = resource.get('duration', {}) - message_retention_duration = datetime.timedelta( - seconds=resource_duration.get('seconds', 0), - microseconds=resource_duration.get('nanos', 0) / 1000) - if topic is None: - return cls(name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, - client=client) - return cls(name, topic=topic, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - - @property - def project(self): - """Project bound to the subscription.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in subscription APIs""" - return 'projects/%s/subscriptions/%s' % (self.project, self.name) - - @property - def path(self): - """URL path for the subscription's APIs""" - return '/%s' % (self.full_name,) - - def auto_ack(self, return_immediately=False, max_messages=1, client=None): - """:class:`AutoAck` factory - - :type return_immediately: bool - :param return_immediately: passed through to :meth:`Subscription.pull` - - :type max_messages: int - :param max_messages: passed through to :meth:`Subscription.pull` - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: passed through to :meth:`Subscription.pull` and - :meth:`Subscription.acknowledge`. 
- - :rtype: :class:`AutoAck` - :returns: the instance created for the given ``ack_id`` and ``message`` - """ - return AutoAck(self, return_immediately, max_messages, client) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic of the - current subscription. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the subscription via a PUT request - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_create] - :end-before: [END subscription_create] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_create( - self.full_name, self.topic.full_name, - ack_deadline=self.ack_deadline, push_endpoint=self.push_endpoint, - retain_acked_messages=self.retain_acked_messages, - message_retention_duration=self.message_retention_duration) - - def exists(self, client=None): - """API call: test existence of the subscription via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_exists] - :end-before: [END subscription_exists] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: bool - :returns: Boolean indicating existence of the subscription. - """ - client = self._require_client(client) - api = client.subscriber_api - try: - api.subscription_get(self.full_name) - except NotFound: - return False - else: - return True - - def reload(self, client=None): - """API call: sync local subscription configuration via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :attr:`ack_deadline` and :attr:`push_endpoint` might never have - been set locally, or might have been updated by another client. This - method fetches their values from the server. - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_reload] - :end-before: [END subscription_reload] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - data = api.subscription_get(self.full_name) - self.ack_deadline = data.get('ackDeadlineSeconds') - push_config = data.get('pushConfig', {}) - self.push_endpoint = push_config.get('pushEndpoint') - if self.topic is None and 'topic' in data: - topic_name = topic_name_from_path(data['topic'], client.project) - self.topic = client.topic(topic_name) - - def delete(self, client=None): - """API call: delete the subscription via a DELETE request. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_delete] - :end-before: [END subscription_delete] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_delete(self.full_name) - - def modify_push_configuration(self, push_endpoint, client=None): - """API call: update the push endpoint for the subscription. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_push_pull] - :end-before: [END subscription_push_pull] - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_pull_push] - :end-before: [END subscription_pull_push] - - :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the - back-end. If None, the application must pull - messages. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_modify_push_config(self.full_name, push_endpoint) - self.push_endpoint = push_endpoint - - def pull(self, return_immediately=False, max_messages=1, client=None): - """API call: retrieve messages for the subscription. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_pull] - :end-before: [END subscription_pull] - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. 
- - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: list of (ack_id, message) tuples - :returns: sequence of tuples: ``ack_id`` is the ID to be used in a - subsequent call to :meth:`acknowledge`, and ``message`` - is an instance of - :class:`~google.cloud.pubsub.message.Message`. - """ - client = self._require_client(client) - api = client.subscriber_api - response = api.subscription_pull( - self.full_name, return_immediately, max_messages) - return [(info['ackId'], Message.from_api_repr(info['message'])) - for info in response] - - def acknowledge(self, ack_ids, client=None): - """API call: acknowledge retrieved messages for the subscription. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/acknowledge - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_acknowledge] - :end-before: [END subscription_acknowledge] - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_acknowledge(self.full_name, ack_ids) - - def modify_ack_deadline(self, ack_ids, ack_deadline, client=None): - """API call: update acknowledgement deadline for a retrieved message. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being updated - - :type ack_deadline: int - :param ack_deadline: new deadline for the message, in seconds - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_modify_ack_deadline( - self.full_name, ack_ids, ack_deadline) - - def snapshot(self, name, client=None): - """Creates a snapshot of this subscription. - - :type name: str - :param name: the name of the subscription - - :rtype: :class:`Snapshot` - :returns: The snapshot created with the passed in arguments. - """ - return Snapshot(name, subscription=self) - - def seek_snapshot(self, snapshot, client=None): - """API call: seek a subscription to a given snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type snapshot: :class:`Snapshot` - :param snapshot: The snapshot to seek to. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_seek(self.full_name, snapshot=snapshot.full_name) - - def seek_timestamp(self, timestamp, client=None): - """API call: seek a subscription to a given point in time - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type time: :class:`datetime.datetime` - :param time: The time to seek to. - """ - client = self._require_client(client) - timestamp = _datetime_to_rfc3339(timestamp) - api = client.subscriber_api - api.subscription_seek(self.full_name, time=timestamp) - - def get_iam_policy(self, client=None): - """Fetch the IAM policy for the subscription. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_get_iam_policy] - :end-before: [END subscription_get_iam_policy] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: policy created from the resource returned by the - ``getIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resp = api.get_iam_policy(self.full_name) - return Policy.from_api_repr(resp) - - def set_iam_policy(self, policy, client=None): - """Update the IAM policy for the subscription. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_set_iam_policy] - :end-before: [END subscription_set_iam_policy] - - :type policy: :class:`google.cloud.pubsub.iam.Policy` - :param policy: the new policy, typically fetched via - :meth:`get_iam_policy` and updated in place. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: updated policy created from the resource returned by the - ``setIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resource = policy.to_api_repr() - resp = api.set_iam_policy(self.full_name, resource) - return Policy.from_api_repr(resp) - - def check_iam_permissions(self, permissions, client=None): - """Verify permissions allowed for the current user. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START subscription_check_iam_permissions] - :end-before: [END subscription_check_iam_permissions] - - :type permissions: list of string - :param permissions: list of permissions to be tested - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: sequence of string - :returns: subset of ``permissions`` allowed by current IAM policy. - """ - client = self._require_client(client) - api = client.iam_policy_api - return api.test_iam_permissions( - self.full_name, list(permissions)) - - -class AutoAck(dict): - """Wrapper for :meth:`Subscription.pull` results. - - Mapping, tracks messages still-to-be-acknowledged. - - When used as a context manager, acknowledges all messages still in the - mapping on `__exit__`. When processing the pulled messages, application - code MUST delete messages from the :class:`AutoAck` mapping which are not - successfully processed, e.g.: - - .. code-block: python - - with AutoAck(subscription) as ack: # calls ``subscription.pull`` - for ack_id, message in ack.items(): - try: - do_something_with(message): - except: - del ack[ack_id] - - :type subscription: :class:`Subscription` - :param subscription: subscription to be pulled. - - :type return_immediately: bool - :param return_immediately: passed through to :meth:`Subscription.pull` - - :type max_messages: int - :param max_messages: passed through to :meth:`Subscription.pull` - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: passed through to :meth:`Subscription.pull` and - :meth:`Subscription.acknowledge`. 
- """ - def __init__(self, subscription, - return_immediately=False, max_messages=1, client=None): - super(AutoAck, self).__init__() - self._subscription = subscription - self._return_immediately = return_immediately - self._max_messages = max_messages - self._client = client - - def __enter__(self): - items = self._subscription.pull( - self._return_immediately, self._max_messages, self._client) - self.update(items) - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if self: - self._subscription.acknowledge(list(self), self._client) diff --git a/pubsub/google/cloud/pubsub/topic.py b/pubsub/google/cloud/pubsub/topic.py deleted file mode 100644 index f16c9d99baed..000000000000 --- a/pubsub/google/cloud/pubsub/topic.py +++ /dev/null @@ -1,551 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Topics.""" - -import base64 -import json -import time - -from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud._helpers import _NOW -from google.cloud._helpers import _to_bytes -from google.cloud.exceptions import NotFound -from google.cloud.pubsub._helpers import topic_name_from_path -from google.cloud.pubsub.iam import Policy -from google.cloud.pubsub.subscription import Subscription - - -class Topic(object): - """Topics are targets to which messages can be published. - - Subscribers then receive those messages. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics - - :type name: str - :param name: the name of the topic - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: A client which holds credentials and project configuration - for the topic (which requires a project). - - :type timestamp_messages: bool - :param timestamp_messages: If true, the topic will add a ``timestamp`` key - to the attributes of each published message: - the value will be an RFC 3339 timestamp. - """ - def __init__(self, name, client, timestamp_messages=False): - self.name = name - self._client = client - self.timestamp_messages = timestamp_messages - - def subscription(self, name, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """Creates a subscription bound to the current topic. - - Example: pull-mode subcription, default parameter values - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_subscription_defaults] - :end-before: [END topic_subscription_defaults] - - Example: pull-mode subcription, override ``ack_deadline`` default - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_subscription_ack90] - :end-before: [END topic_subscription_ack90] - - Example: push-mode subcription - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_subscription_push] - :end-before: [END topic_subscription_push] - - :type name: str - :param name: the name of the subscription - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the - back-end. If not set, the application must pull - messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. 
If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: :class:`Subscription` - :returns: The subscription created with the passed in arguments. - """ - return Subscription( - name, self, ack_deadline=ack_deadline, push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - - @classmethod - def from_api_repr(cls, resource, client): - """Factory: construct a topic given its API representation - - :type resource: dict - :param resource: topic resource representation returned from the API - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration for the topic. - - :rtype: :class:`google.cloud.pubsub.topic.Topic` - :returns: Topic parsed from ``resource``. - :raises: :class:`ValueError` if ``client`` is not ``None`` and the - project from the resource does not agree with the project - from the client. - """ - topic_name = topic_name_from_path(resource['name'], client.project) - return cls(topic_name, client=client) - - @property - def project(self): - """Project bound to the topic.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in topic / subscription APIs""" - return 'projects/%s/topics/%s' % (self.project, self.name) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the topic via a PUT request - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_create] - :end-before: [END topic_create] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - api.topic_create(topic_path=self.full_name) - - def exists(self, client=None): - """API call: test for the existence of the topic via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_exists] - :end-before: [END topic_exists] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: bool - :returns: Boolean indicating existence of the topic. - """ - client = self._require_client(client) - api = client.publisher_api - - try: - api.topic_get(topic_path=self.full_name) - except NotFound: - return False - else: - return True - - def delete(self, client=None): - """API call: delete the topic via a DELETE request - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - Example: - - .. 
literalinclude:: pubsub_snippets.py - :start-after: [START topic_delete] - :end-before: [END topic_delete] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - api.topic_delete(topic_path=self.full_name) - - def _timestamp_message(self, attrs): - """Add a timestamp to ``attrs``, if the topic is so configured. - - If ``attrs`` already has the key, do nothing. - - Helper method for ``publish``/``Batch.publish``. - """ - if self.timestamp_messages and 'timestamp' not in attrs: - attrs['timestamp'] = _datetime_to_rfc3339(_NOW()) - - def publish(self, message, client=None, **attrs): - """API call: publish a message to a topic via a POST request - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - Example without message attributes: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_publish_simple_message] - :end-before: [END topic_publish_simple_message] - - With message attributes: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_publish_message_with_attrs] - :end-before: [END topic_publish_message_with_attrs] - - :type message: bytes - :param message: the message payload - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. 
- - :type attrs: dict (string -> string) - :param attrs: key-value pairs to send as message attributes - - :rtype: str - :returns: message ID assigned by the server to the published message - """ - client = self._require_client(client) - api = client.publisher_api - - self._timestamp_message(attrs) - message_data = {'data': message, 'attributes': attrs} - message_ids = api.topic_publish(self.full_name, [message_data]) - return message_ids[0] - - def batch(self, client=None, **kwargs): - """Return a batch to use as a context manager. - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_batch] - :end-before: [END topic_batch] - - .. note:: - - The only API request happens during the ``__exit__()`` of the topic - used as a context manager, and only if the block exits without - raising an exception. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :type kwargs: dict - :param kwargs: Keyword arguments passed to the - :class:`~google.cloud.pubsub.topic.Batch` constructor. - - :rtype: :class:`Batch` - :returns: A batch to use as a context manager. - """ - client = self._require_client(client) - return Batch(self, client, **kwargs) - - def list_subscriptions(self, page_size=None, page_token=None, client=None): - """List subscriptions for the project associated with this client. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_list_subscriptions] - :end-before: [END topic_list_subscriptions] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. 
If not - passed, the API will return the first page of - topics. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - return api.topic_list_subscriptions(self, page_size, page_token) - - def get_iam_policy(self, client=None): - """Fetch the IAM policy for the topic. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_get_iam_policy] - :end-before: [END topic_get_iam_policy] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: policy created from the resource returned by the - ``getIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resp = api.get_iam_policy(self.full_name) - return Policy.from_api_repr(resp) - - def set_iam_policy(self, policy, client=None): - """Update the IAM policy for the topic. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_set_iam_policy] - :end-before: [END topic_set_iam_policy] - - :type policy: :class:`google.cloud.pubsub.iam.Policy` - :param policy: the new policy, typically fetched via - :meth:`get_iam_policy` and updated in place. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: updated policy created from the resource returned by the - ``setIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resource = policy.to_api_repr() - resp = api.set_iam_policy(self.full_name, resource) - return Policy.from_api_repr(resp) - - def check_iam_permissions(self, permissions, client=None): - """Verify permissions allowed for the current user. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions - - Example: - - .. literalinclude:: pubsub_snippets.py - :start-after: [START topic_check_iam_permissions] - :end-before: [END topic_check_iam_permissions] - - :type permissions: list of string - :param permissions: list of permissions to be tested - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: sequence of string - :returns: subset of ``permissions`` allowed by current IAM policy. - """ - client = self._require_client(client) - api = client.iam_policy_api - return api.test_iam_permissions( - self.full_name, list(permissions)) - - -class Batch(object): - """Context manager: collect messages to publish via a single API call. - - Helper returned by :meth:Topic.batch - - :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: the topic being published - - :param client: The client to use. - :type client: :class:`google.cloud.pubsub.client.Client` - - :param max_interval: The maximum interval, in seconds, before the batch - will automatically commit. Note that this does not - run a background loop; it just checks when each - message is published. Therefore, this is intended - for situations where messages are published at - reasonably regular intervals. 
Defaults to infinity - (off). - :type max_interval: float - - :param max_messages: The maximum number of messages to hold in the batch - before automatically commiting. Defaults to infinity - (off). - :type max_messages: float - - :param max_size: The maximum size that the serialized messages can be - before automatically commiting. Defaults to 9 MB - (slightly less than the API limit). - :type max_size: int - """ - _INFINITY = float('inf') - - def __init__(self, topic, client, max_interval=_INFINITY, - max_messages=_INFINITY, max_size=1024 * 1024 * 9): - self.topic = topic - self.client = client - self.messages = [] - self.message_ids = [] - - # Set the autocommit rules. If the interval or number of messages - # is exceeded, then the .publish() method will imply a commit. - self._max_interval = max_interval - self._max_messages = max_messages - self._max_size = max_size - - # Set up the initial state, initializing messages, the starting - # timestamp, etc. - self._reset_state() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.commit() - - def __iter__(self): - return iter(self.message_ids) - - def _reset_state(self): - """Reset the state of this batch.""" - - del self.messages[:] - self._start_timestamp = time.time() - self._current_size = 0 - - def publish(self, message, **attrs): - """Emulate publishing a message, but save it. - - :type message: bytes - :param message: the message payload - - :type attrs: dict (string -> string) - :param attrs: key-value pairs to send as message attributes - """ - self.topic._timestamp_message(attrs) - - # Append the message to the list of messages.. - item = {'attributes': attrs, 'data': message} - self.messages.append(item) - - # Determine the approximate size of the message, and increment - # the current batch size appropriately. 
- encoded = base64.b64encode(_to_bytes(message)) - encoded += base64.b64encode( - json.dumps(attrs, ensure_ascii=False).encode('utf8'), - ) - self._current_size += len(encoded) - - # If too much time has elapsed since the first message - # was added, autocommit. - now = time.time() - if now - self._start_timestamp > self._max_interval: - self.commit() - return - - # If the number of messages on the list is greater than the - # maximum allowed, autocommit (with the batch's client). - if len(self.messages) >= self._max_messages: - self.commit() - return - - # If we have reached the max size, autocommit. - if self._current_size >= self._max_size: - self.commit() - return - - def commit(self, client=None): - """Send saved messages as a single API call. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - """ - if not self.messages: - return - - if client is None: - client = self.client - api = client.publisher_api - message_ids = api.topic_publish(self.topic.full_name, self.messages[:]) - self.message_ids.extend(message_ids) - self._reset_state() diff --git a/pubsub/google/cloud/pubsub_v1/__init__.py b/pubsub/google/cloud/pubsub_v1/__init__.py new file mode 100644 index 000000000000..55454e211015 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
def add_methods(SourceClass, blacklist=()):
    """Return a class decorator that proxies ``SourceClass``'s methods.

    For every public, callable attribute of ``SourceClass`` (typically a
    GAPIC-generated client class), the decorator attaches a same-named
    method to the decorated class which delegates the call to the
    instance's ``api`` attribute.

    Skipped names: anything in ``blacklist``, private/magic names
    (leading underscore), non-callable attributes, and any name the
    decorated class already defines explicitly.

    Args:
        SourceClass (type): The class whose methods should be mirrored.
        blacklist (Sequence[str]): Method names that must not be added.

    Returns:
        Callable[[type], type]: The actual class decorator.
    """
    def wrap(name, doc):
        """Build one delegating method, binding ``name`` eagerly.

        Using a helper function (rather than a lambda inside the loop)
        avoids the late-binding closure bug where every wrapper would
        end up delegating to the *last* method iterated.
        """
        def fx(self, *args, **kwargs):
            return getattr(self.api, name)(*args, **kwargs)
        fx.__name__ = name
        fx.__doc__ = doc
        return fx

    def actual_decorator(cls):
        # Reflectively iterate over most of the methods on the source
        # class (the GAPIC) and make wrapped versions available on the
        # decorated client class.
        for name in dir(SourceClass):
            # Ignore all private and magic methods, and the blacklist.
            if name.startswith('_') or name in blacklist:
                continue

            # Honor methods explicitly defined on the decorated class,
            # as promised by this function's contract.
            if name in cls.__dict__:
                continue

            # Look the attribute up on the *source class*; the original
            # code referenced ``self.api`` here, but no ``self`` exists
            # at class-decoration time (NameError).
            attr = getattr(SourceClass, name)
            if not callable(attr):
                continue

            # Attach the wrapper to the class (the original incorrectly
            # called ``setattr(self, ...)``).
            setattr(cls, name, wrap(name, attr.__doc__))

        # Return the augmented class.
        return cls

    # Return the actual decorator; it is what callers apply to a class.
    return actual_decorator
# Lightweight record for one queued message: raw payload bytes plus its
# (text-string) attribute mapping.
Message = collections.namedtuple('Message', ['data', 'attrs'])


class Batch(object):
    """A batch of messages.

    The batch is the internal group of messages which are either awaiting
    publication or currently in-flight.

    A batch is automatically created by the PublisherClient when the first
    message to be published is received; subsequent messages are added to
    that batch until the process of actual publishing *starts*. Once this
    occurs, any new messages sent to ``publish`` open a new batch.

    If you are using this library, you most likely do not need to
    instantiate batch objects directly; they will be created for you. To
    change the batching settings, see the ``batching`` argument on
    :class:`google.cloud.pubsub_v1.PublisherClient`.

    Args:
        client (:class:`google.cloud.pubsub_v1.PublisherClient`): The
            publisher client used to create this batch. The queue and
            thread classes are taken from it.
        settings (:class:`google.cloud.pubsub_v1.types.Batching`): The
            settings for batch publishing; considered immutable once the
            batch has been opened.
    """
    def __init__(self, client, settings):
        self._client = client
        self._settings = settings
        self._messages = self._client._queue_class()
        self._status = 'accepting messages'

        # Spawn a monitor so the batch commits once ``max_latency``
        # seconds elapse, even if it never fills up.
        #
        # BUG FIX: the original constructed the thread object but never
        # started it, and passed the target positionally -- which
        # ``threading.Thread``/``multiprocessing.Process`` interpret as
        # the ``group`` argument, not the target.
        self._thread = self._client.thread_class(target=self.monitor)
        self._thread.start()

    def commit(self):
        """Actually publish all of the messages on the active batch.

        This moves the batch out from being the active batch to an
        in-flight batch on the publisher, and then the batch is discarded
        upon completion.
        """
        # Mark the batch as no longer accepting messages, so that the
        # monitor thread becomes a no-op if it fires after an explicit
        # commit. (The original never updated ``_status``, so the
        # monitor's guard could never trigger.)
        self._status = 'in-flight'

        # If this is the active batch on the client right now, remove it.
        if self._client._batch is self:
            self._client._batch = None

        # Add this to the set of in-flight batches, to ensure we are
        # holding a reference.
        self._client._in_flight_batches.add(self)

    def monitor(self):
        """Commit this batch after sufficient time has elapsed.

        This simply sleeps for ``self._settings.max_latency`` seconds,
        and then calls commit unless the batch has already been
        committed.

        Note: this method blocks; it is run in the thread spawned by
        ``__init__``.
        """
        # Sleep for however long we should be waiting.
        time.sleep(self._settings.max_latency)

        # If, in the intervening period, the batch started to be
        # committed, then no-op at this point.
        if self._status != 'accepting messages':
            return

        # Commit.
        return self.commit()

    def publish(self, data, **attrs):
        """Queue a single message on this batch.

        .. note::
            Messages in Pub/Sub are blobs of bytes; they are *binary*
            data, not text. ``data`` must be a bytestring, and this
            method raises if you send a text string, because Pub/Sub is
            platform independent and there is no way to know how to
            decode messages properly on the other side.

        Args:
            data (bytes): A bytestring representing the message body.
            attrs (Mapping[str, str]): Attributes to be sent as message
                metadata. Values may be text strings or UTF-8 byte
                strings; byte strings are decoded to text.

        Raises:
            TypeError: If ``data`` is not a bytestring, or if an
                attribute value is neither ``str`` nor ``bytes``.

        Returns:
            None. (A future version is expected to return a
            Future-like object; the WIP docstring promised one, but no
            such object is produced yet.)
        """
        # Sanity check: is the data being sent as a bytestring?
        # (``bytes`` here is equivalent to ``six.binary_type``; the
        # original referenced ``six`` without importing it.)
        if not isinstance(data, bytes):
            raise TypeError('Data being published to Pub/Sub must be sent '
                            'as a bytestring.')

        # Coerce all attribute *values* to text strings.
        # BUG FIX: the original tested ``data`` instead of each value,
        # and called the never-imported ``copy()``.
        for key, value in list(attrs.items()):
            if isinstance(value, str):
                continue
            if isinstance(value, bytes):
                attrs[key] = value.decode('utf-8')
                continue
            raise TypeError('All attributes being published to Pub/Sub must '
                            'be sent as text strings.')

        # Add the message to the batch.
        self._messages.put(Message(data=data, attrs=attrs))
from __future__ import absolute_import

import functools
import multiprocessing
import pkg_resources

import six

from google.cloud.gapic.pubsub.v1 import publisher_client

from google.cloud.pubsub_v1 import _gapic
from google.cloud.pubsub_v1 import types
from google.cloud.pubsub_v1.publisher.batch import Batch


__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version


@_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',))
class PublisherClient(object):
    """A publisher client for Cloud Pub/Sub.

    This creates an object that is capable of publishing messages.
    Generally, you can instantiate this client with no arguments, and you
    get sensible defaults.

    Args:
        batching (:class:`google.cloud.pubsub_v1.types.Batching`): The
            settings for batch publishing.
        thread_class (class): Any class that is duck-type compatible with
            :class:`threading.Thread`.
            The default is :class:`multiprocessing.Process`.
        queue_class (class): Any class that is duck-type compatible with
            :class:`queue.Queue`.
            The default is :class:`multiprocessing.Queue`.
        kwargs (dict): Any additional arguments provided are sent as
            keyword arguments to the underlying
            :class:`~gapic.pubsub.v1.publisher_client.PublisherClient`.
            Generally, you should not need to set additional keyword
            arguments.
    """

    def __init__(self, batching=(), thread_class=multiprocessing.Process,
                 queue_class=multiprocessing.Queue, **kwargs):
        # Add the metrics headers, and instantiate the underlying GAPIC
        # client.
        kwargs['lib_name'] = 'gccl'
        kwargs['lib_version'] = __VERSION__
        # BUG FIX: the original passed an undefined ``*args``.
        self.api = publisher_client.PublisherClient(**kwargs)
        # BUG FIX: unpack the ``batching`` tuple so each element maps to
        # its Batching field; the original passed the whole tuple as the
        # single ``max_bytes`` positional.
        self.batching = types.Batching(*batching)

        # Set the thread and queue classes.
        self._thread_class = thread_class
        self._queue_class = queue_class

        # The batch on the publisher client is responsible for holding
        # messages.
        #
        # We set this to None for now; the first message that is
        # published will create it (in order to ensure that the start
        # time is correct).
        self._batch = None
        self._in_flight_batches = set()

    @property
    def batch(self):
        """Return the current batch.

        This will create a new batch if no batch currently exists.

        Returns:
            :class:`~pubsub_v1.publisher.batch.Batch`: The batch object.
        """
        if self._batch is None:
            # BUG FIX: the original assigned to a typo'd local name
            # (``self_batch``) and therefore always returned None.
            self._batch = Batch(client=self, settings=self.batching)
        return self._batch

    @property
    def queue_class(self):
        """Return the queue class provided at instantiation.

        Returns:
            class: A class duck-type compatible with :class:`queue.Queue`.
        """
        return self._queue_class

    @property
    def thread_class(self):
        """Return the thread class provided at instantiation.

        Returns:
            class: A class duck-type compatible with
            :class:`threading.Thread`.
        """
        return self._thread_class

    @functools.wraps(Batch.publish)
    def publish(self, data, **attrs):
        # BUG FIX: forward the attributes as keyword arguments
        # (``**attrs``); the original's ``*attrs`` unpacked only the
        # attribute *names* as positionals.
        return self.batch.publish(data, **attrs)
class SubscriberClient(subscriber_client.SubscriberClient):
    """A subscriber client for Cloud Pub/Sub.

    Thin subclass of the GAPIC-generated client that stamps the
    client-library identification headers onto every instantiation.
    """

    @functools.wraps(subscriber_client.SubscriberClient.__init__)
    def __init__(self, *args, **kwargs):
        # Identify this library ('gccl') and its version to the Pub/Sub
        # service for metrics purposes, then defer to the GAPIC client.
        kwargs['lib_name'] = 'gccl'
        kwargs['lib_version'] = __VERSION__
        super(SubscriberClient, self).__init__(*args, **kwargs)

    @functools.wraps(subscriber_client.SubscriberClient.get_subscription)
    def get_subscription(self, subscription, options=None):
        # BUG FIX: the original override had a truncated docstring
        # ('"""Return the """') and no body, silently shadowing the
        # working GAPIC method with one that always returned None.
        # Delegate to the parent implementation instead.
        return super(SubscriberClient, self).get_subscription(
            subscription, options=options)
Batching = collections.namedtuple('Batching',
    ['max_bytes', 'max_latency', 'max_messages'],
)
# Assigning ``__new__.__defaults__`` is the Python-2-compatible way to
# give a namedtuple default field values (``namedtuple(...,
# defaults=...)`` requires Python 3.7+). Defaults apply right-to-left,
# so all three fields here are optional.
Batching.__new__.__defaults__ = (
    1024 * 1024 * 5,  # max_bytes: 5 MB
    0.001,  # max_latency: 1 millisecond
    1000,  # max_messages: 1,000
)

# Define the type class and default values for flow control settings.
#
# This class is used when creating a publisher or subscriber client, and
# these settings can be altered to tweak Pub/Sub behavior.
# The defaults should be fine for most use cases.
FlowControl = collections.namedtuple('FlowControl',
    ['max_bytes', 'max_messages'],
)
FlowControl.__new__.__defaults__ = (
    # NOTE(review): the byte cap is computed once at import time from
    # the machine's RAM via psutil; it does not track later changes.
    psutil.virtual_memory().total * 0.2,  # max_bytes: 20% of total RAM
    float('inf'),  # max_messages: no limit
)


# Re-export every protobuf message class from pubsub_pb2 at this
# module's top level, alongside the two settings tuples above, so that
# users can do ``from google.cloud.pubsub_v1 import types`` and find
# everything in one place. (``get_messages`` presumably yields a
# name -> message-class mapping; confirm against google.gax docs.)
names = ['Batching', 'FlowControl']
for name, message in get_messages(pubsub_pb2).items():
    setattr(sys.modules[__name__], name, message)
    names.append(name)


# Declare the public API of this module explicitly.
__all__ = tuple(sorted(names))
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import os -import unittest - -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from grpc import StatusCode -import httplib2 - -from google.cloud.environment_vars import PUBSUB_EMULATOR -from google.cloud.pubsub import client - -from test_utils.retry import RetryInstanceState -from test_utils.retry import RetryResult -from test_utils.retry import RetryErrors -from test_utils.system import EmulatorCreds -from test_utils.system import unique_resource_id - - -def _unavailable(exc): - return exc_to_code(exc) == StatusCode.UNAVAILABLE - - -retry_unavailable = RetryErrors(GaxError, _unavailable) - - -class Config(object): - """Run-time configuration to be modified at set-up. - - This is a mutable stand-in to allow test set-up to modify - global state. - """ - CLIENT = None - IN_EMULATOR = False - - -def setUpModule(): - Config.IN_EMULATOR = os.getenv(PUBSUB_EMULATOR) is not None - if Config.IN_EMULATOR: - credentials = EmulatorCreds() - http = httplib2.Http() # Un-authorized. - Config.CLIENT = client.Client(credentials=credentials, - _http=http) - else: - Config.CLIENT = client.Client() - - -def _consume_topics(pubsub_client): - """Consume entire iterator. - - :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` - :param pubsub_client: Client to use to retrieve topics. - - :rtype: list - :returns: List of all topics encountered. - """ - return list(pubsub_client.list_topics()) - - -def _consume_snapshots(pubsub_client): - """Consume entire iterator. 
- - :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` - :param pubsub_client: Client to use to retrieve snapshots. - - :rtype: list - :returns: List of all snapshots encountered. - """ - return list(pubsub_client.list_snapshots()) - - -def _consume_subscriptions(topic): - """Consume entire iterator. - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: Topic to use to retrieve subscriptions. - - :rtype: list - :returns: List of all subscriptions encountered. - """ - return list(topic.list_subscriptions()) - - -class TestPubsub(unittest.TestCase): - - def setUp(self): - self.to_delete = [] - - def tearDown(self): - for doomed in self.to_delete: - doomed.delete() - - def test_create_topic(self): - topic_name = 'a-new-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - self.assertTrue(topic.exists()) - self.assertEqual(topic.name, topic_name) - - def test_list_topics(self): - before = _consume_topics(Config.CLIENT) - topics_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), - ] - for topic_name in topics_to_create: - topic = Config.CLIENT.topic(topic_name) - topic.create() - self.to_delete.append(topic) - - # Retrieve the topics. 
- def _all_created(result): - return len(result) == len(before) + len(topics_to_create) - - retry = RetryResult(_all_created) - after = retry(_consume_topics)(Config.CLIENT) - - created = [topic for topic in after - if topic.name in topics_to_create and - topic.project == Config.CLIENT.project] - self.assertEqual(len(created), len(topics_to_create)) - - def test_create_subscription_defaults(self): - TOPIC_NAME = 'create-sub-def' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertIs(subscription.topic, topic) - - def test_create_subscription_w_ack_deadline(self): - TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() - subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=120) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertEqual(subscription.ack_deadline, 120) - self.assertIs(subscription.topic, topic) - - def test_create_subscription_w_message_retention(self): - TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() - duration = datetime.timedelta(hours=12) - subscription = 
topic.subscription( - SUBSCRIPTION_NAME, retain_acked_messages=True, - message_retention_duration=duration) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertTrue(subscription.retain_acked_messages) - self.assertEqual(subscription.message_retention_duration, duration) - self.assertIs(subscription.topic, topic) - - def test_list_subscriptions(self): - TOPIC_NAME = 'list-sub' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - topic.create() - self.to_delete.append(topic) - empty = _consume_subscriptions(topic) - self.assertEqual(len(empty), 0) - subscriptions_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), - ] - for subscription_name in subscriptions_to_create: - subscription = topic.subscription(subscription_name) - subscription.create() - self.to_delete.append(subscription) - - # Retrieve the subscriptions. 
- def _all_created(result): - return len(result) == len(subscriptions_to_create) - - retry = RetryResult(_all_created) - all_subscriptions = retry(_consume_subscriptions)(topic) - - created = [subscription for subscription in all_subscriptions - if subscription.name in subscriptions_to_create] - self.assertEqual(len(created), len(subscriptions_to_create)) - - def test_message_pull_mode_e2e(self): - import operator - TOPIC_NAME = 'message-e2e' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME, - timestamp_messages=True) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - - MESSAGE_1 = b'MESSAGE ONE' - MESSAGE_2 = b'MESSAGE ONE' - EXTRA_1 = 'EXTRA 1' - EXTRA_2 = 'EXTRA 2' - topic.publish(MESSAGE_1, extra=EXTRA_1) - topic.publish(MESSAGE_2, extra=EXTRA_2) - - class Hoover(object): - - def __init__(self): - self.received = [] - - def done(self, *dummy): - return len(self.received) == 2 - - def suction(self): - with subscription.auto_ack(max_messages=2) as ack: - self.received.extend(ack.values()) - - hoover = Hoover() - retry = RetryInstanceState(hoover.done) - retry(hoover.suction)() - - message1, message2 = sorted(hoover.received, - key=operator.attrgetter('timestamp')) - - self.assertEqual(message1.data, MESSAGE_1) - self.assertEqual(message1.attributes['extra'], EXTRA_1) - self.assertIsNotNone(message1.service_timestamp) - - self.assertEqual(message2.data, MESSAGE_2) - self.assertEqual(message2.attributes['extra'], EXTRA_2) - self.assertIsNotNone(message2.service_timestamp) - - def _maybe_emulator_skip(self): - # NOTE: This method is necessary because ``Config.IN_EMULATOR`` - # is set at runtime rather than import time, which means we - # can't use the @unittest.skipIf decorator. 
- if Config.IN_EMULATOR: - self.skipTest('IAM not supported by Pub/Sub emulator') - - def test_topic_iam_policy(self): - from google.cloud.pubsub.iam import PUBSUB_TOPICS_GET_IAM_POLICY - self._maybe_emulator_skip() - topic_name = 'test-topic-iam-policy-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - topic.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(topic.exists)() - self.to_delete.append(topic) - - if topic.check_iam_permissions([PUBSUB_TOPICS_GET_IAM_POLICY]): - policy = topic.get_iam_policy() - viewers = set(policy.viewers) - viewers.add(policy.user('jjg@google.com')) - policy.viewers = viewers - new_policy = topic.set_iam_policy(policy) - self.assertEqual(new_policy.viewers, policy.viewers) - - def test_subscription_iam_policy(self): - from google.cloud.pubsub.iam import PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY - self._maybe_emulator_skip() - topic_name = 'test-sub-iam-policy-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - topic.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(topic.exists)() - self.to_delete.append(topic) - - SUB_NAME = 'test-sub-iam-policy-sub' + unique_resource_id('-') - subscription = topic.subscription(SUB_NAME) - subscription.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(subscription.exists)() - self.to_delete.insert(0, subscription) - - if subscription.check_iam_permissions( - [PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY]): - policy = subscription.get_iam_policy() - viewers = set(policy.viewers) - viewers.add(policy.user('jjg@google.com')) - policy.viewers = viewers - new_policy = subscription.set_iam_policy(policy) - self.assertEqual(new_policy.viewers, policy.viewers) - - def test_create_snapshot(self): - TOPIC_NAME = 'create-snap-def' + 
unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - before_snapshots = _consume_snapshots(Config.CLIENT) - - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=600) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') - snapshot = subscription.snapshot(SNAPSHOT_NAME) - snapshot.create() - self.to_delete.append(snapshot) - - # There is no GET method for snapshot, so check existence using - # list - after_snapshots = _consume_snapshots(Config.CLIENT) - self.assertEqual(len(before_snapshots) + 1, len(after_snapshots)) - - def full_name(obj): - return obj.full_name - - self.assertIn(snapshot.full_name, map(full_name, after_snapshots)) - self.assertNotIn(snapshot.full_name, map(full_name, before_snapshots)) - - - def test_seek(self): - TOPIC_NAME = 'seek-e2e' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME, - timestamp_messages=True) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - - SUBSCRIPTION_NAME = 'subscribing-to-seek' + unique_resource_id('-') - subscription = topic.subscription( - SUBSCRIPTION_NAME, retain_acked_messages=True) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - - SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') - snapshot = subscription.snapshot(SNAPSHOT_NAME) - snapshot.create() - self.to_delete.append(snapshot) - - MESSAGE_1 = b'MESSAGE ONE' - topic.publish(MESSAGE_1) - MESSAGE_2 = b'MESSAGE TWO' - topic.publish(MESSAGE_2) - - ((ack_id_1a, recvd_1a), ) = subscription.pull() - ((ack_id_2a, recvd_2a), ) = subscription.pull() - before_data = [obj.data for obj in (recvd_1a, recvd_2a)] - self.assertIn(MESSAGE_1, before_data) - 
self.assertIn(MESSAGE_2, before_data) - subscription.acknowledge((ack_id_1a, ack_id_2a)) - - self.assertFalse(subscription.pull(return_immediately=True)) - - subscription.seek_snapshot(snapshot) - - ((_, recvd_1b), ) = subscription.pull() - ((_, recvd_2b), ) = subscription.pull() - after_data = [obj.data for obj in (recvd_1b, recvd_2b)] - self.assertEqual(sorted(before_data), sorted(after_data)) diff --git a/pubsub/tests/unit/test__gax.py b/pubsub/tests/unit/test__gax.py deleted file mode 100644 index 2bd7983b40af..000000000000 --- a/pubsub/tests/unit/test__gax.py +++ /dev/null @@ -1,1598 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - -try: - # pylint: disable=unused-import - import google.cloud.pubsub._gax - # pylint: enable=unused-import -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False -else: - _HAVE_GRPC = True - -from google.cloud._testing import _GAXBaseAPI - - -def _make_credentials(): - # pylint: disable=redefined-outer-name - import google.auth.credentials - # pylint: enable=redefined-outer-name - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(object): - PROJECT = 'PROJECT' - PROJECT_PATH = 'projects/%s' % (PROJECT,) - LIST_TOPICS_PATH = '%s/topics' % (PROJECT_PATH,) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) - SUB_NAME = 'sub_name' - SUB_PATH = '%s/subscriptions/%s' % (TOPIC_PATH, SUB_NAME) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = '%s/snapshots/%s' % (PROJECT_PATH, SNAPSHOT_NAME) - TIME = 12345 - - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_PublisherAPI(_Base, unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._gax import _PublisherAPI - - return _PublisherAPI - - def test_ctor(self): - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_topics_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.topic import Topic - - TOKEN = 'TOKEN' - response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], - page_token=TOKEN) - gax_api = _GAXPublisherAPI(_list_topics_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - 
next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, TOKEN) - - name, page_size, options = gax_api._list_topics_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_list_topics_with_paging(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - response = _GAXPageIterator( - [_TopicPB(self.TOPIC_PATH)], page_token=NEW_TOKEN) - gax_api = _GAXPublisherAPI(_list_topics_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - iterator = api.list_topics( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, NEW_TOKEN) - - name, page_size, options = gax_api._list_topics_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_topic_create(self): - topic_pb = _TopicPB(self.TOPIC_PATH) - gax_api = _GAXPublisherAPI(_create_topic_response=topic_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_create(self.TOPIC_PATH) - - self.assertEqual(resource, {'name': self.TOPIC_PATH}) - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_already_exists(self): - from google.cloud.exceptions import 
Conflict - - gax_api = _GAXPublisherAPI(_create_topic_conflict=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_hit(self): - topic_pb = _TopicPB(self.TOPIC_PATH) - gax_api = _GAXPublisherAPI(_get_topic_response=topic_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_get(self.TOPIC_PATH) - - self.assertEqual(resource, {'name': self.TOPIC_PATH}) - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_get(self.TOPIC_PATH) - - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_get(self.TOPIC_PATH) - - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - 
self.assertIsNone(options) - - def test_topic_delete_hit(self): - gax_api = _GAXPublisherAPI(_delete_topic_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXPublisherAPI(_delete_topic_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_publish_hit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MSGID = 'DEADBEEF' - MESSAGE = {'data': B64, 'attributes': {}} - response = _PublishResponsePB([MSGID]) - gax_api = _GAXPublisherAPI(_publish_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(resource, [MSGID]) - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data.decode('ascii'), B64) - self.assertEqual(message_pb.attributes, {}) - self.assertEqual(options.is_bundling, False) - - def 
test_topic_publish_miss_w_attrs_w_bytes_payload(self): - import base64 - from google.cloud.exceptions import NotFound - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD) - MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} - timeout = 120 # 120 seconds or 2 minutes - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE], timeout=timeout) - - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data, B64) - self.assertEqual(message_pb.attributes, {'foo': 'bar'}) - self.assertEqual(options.is_bundling, False) - self.assertEqual(options.timeout, timeout) - - def test_topic_publish_error(self): - import base64 - from google.gax.errors import GaxError - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': B64, 'attributes': {}} - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data.decode('ascii'), B64) - self.assertEqual(message_pb.attributes, {}) - self.assertEqual(options.is_bundling, False) - - def test_topic_list_subscriptions_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - response = _GAXPageIterator([local_sub_path]) - gax_api = 
_GAXPublisherAPI(_list_topic_subscriptions_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_topic_list_subscriptions_with_paging(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - response = _GAXPageIterator( - [local_sub_path], page_token=NEW_TOKEN) - gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions( - topic, page_size=SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(next_token, NEW_TOKEN) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - name, page_size, options = ( - 
gax_api._list_topic_subscriptions_called_with) - self.assertEqual(name, self.TOPIC_PATH) - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_topic_list_subscriptions_miss(self): - from google.gax import INITIAL_PAGE - from google.cloud.exceptions import NotFound - from google.cloud.pubsub.topic import Topic - - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - topic = Topic(self.TOPIC_NAME, client) - api.topic_list_subscriptions(topic) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_topic_list_subscriptions_error(self): - from google.gax import INITIAL_PAGE - from google.gax.errors import GaxError - from google.cloud.pubsub.topic import Topic - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - topic = Topic(self.TOPIC_NAME, client) - api.topic_list_subscriptions(topic) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_SubscriberAPI(_Base, unittest.TestCase): - - PUSH_ENDPOINT = 'https://api.example.com/push' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._gax import _SubscriberAPI - - return _SubscriberAPI - - def test_ctor(self): - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_subscriptions_no_paging(self): - from google.gax import 
INITIAL_PAGE - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Subscription as SubscriptionPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - response = _GAXPageIterator([sub_pb]) - gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) - - name, page_size, options = gax_api._list_subscriptions_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_list_subscriptions_with_paging(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Subscription as SubscriptionPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) - gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - client = _Client(self.PROJECT) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_subscriptions( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, NEW_TOKEN) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) - - name, page_size, options = gax_api._list_subscriptions_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 23) - self.assertEqual(options.page_token, TOKEN) - - def test_subscription_create(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertIsNone(push_config) - self.assertEqual(ack_deadline, None) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_optional_params(self): - import datetime - - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - 
expected_ack_deadline = 1729 - expected_push_endpoint = 'push-endpoint' - expected_retain_acked_messages = True - expected_message_retention_duration = datetime.timedelta( - days=1, hours=7, minutes=2, seconds=9) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, ack_deadline=expected_ack_deadline, - push_endpoint=expected_push_endpoint, - retain_acked_messages=expected_retain_acked_messages, - message_retention_duration=expected_message_retention_duration) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - print(gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, expected_push_endpoint) - self.assertEqual(ack_deadline, expected_ack_deadline) - self.assertEqual(retain_acked_messages, expected_retain_acked_messages) - self.assertEqual(message_retention_duration.seconds, - expected_message_retention_duration.total_seconds()) - self.assertIsNone(options) - - def test_subscription_create_already_exists(self): - from google.cloud.exceptions import Conflict - - DEADLINE = 600 - gax_api = _GAXSubscriberAPI(_create_subscription_conflict=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) - self.assertEqual(ack_deadline, DEADLINE) - 
self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertIsNone(push_config) - self.assertEqual(ack_deadline, None) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_get_hit(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - gax_api = _GAXSubscriberAPI(_get_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.subscription_get(self.SUB_PATH) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'pushConfig': { - 'pushEndpoint': self.PUSH_ENDPOINT, - }, - } - self.assertEqual(resource, expected) - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_get_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_get(self.SUB_PATH) - - sub_path, options = 
gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_get_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_get(self.SUB_PATH) - - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_delete_hit(self): - gax_api = _GAXSubscriberAPI(_delete_subscription_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_delete_subscription_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_modify_push_config_hit(self): - gax_api = _GAXSubscriberAPI(_modify_push_config_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - 
api.subscription_modify_push_config(self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_modify_push_config_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_modify_push_config( - self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_modify_push_config_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_modify_push_config( - self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_pull_explicit(self): - import base64 - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud._helpers import _datetime_to_rfc3339 - - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - NOW_PB = _datetime_to_pb_timestamp(NOW) - NOW_RFC3339 = _datetime_to_rfc3339(NOW) - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = { - 'messageId': MSG_ID, - 'data': B64, - 'attributes': {'a': 'b'}, - 'publishTime': NOW_RFC3339, - } - RECEIVED = 
[{'ackId': ACK_ID, 'message': MESSAGE}] - message_pb = _PubsubMessagePB(MSG_ID, B64, {'a': 'b'}, NOW_PB) - response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)]) - gax_api = _GAXSubscriberAPI(_pull_response=response_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - MAX_MESSAGES = 10 - - received = api.subscription_pull( - self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) - - self.assertEqual(received, RECEIVED) - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, MAX_MESSAGES) - self.assertTrue(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_defaults_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_pull(self.SUB_PATH) - - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, 1) - self.assertFalse(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_defaults_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_pull(self.SUB_PATH) - - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, 1) - self.assertFalse(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_deadline_exceeded(self): - client = _Client(self.PROJECT) - gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) - api = self._make_one(gax_api, client) - - result = 
api.subscription_pull(self.SUB_PATH) - self.assertEqual(result, []) - - def test_subscription_pull_deadline_exceeded_return_immediately(self): - from google.gax.errors import GaxError - - client = _Client(self.PROJECT) - gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_pull(self.SUB_PATH, return_immediately=True) - - def test_subscription_acknowledge_hit(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI(_acknowledge_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_acknowledge_miss(self): - from google.cloud.exceptions import NotFound - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_acknowledge_error(self): - from google.gax.errors import GaxError - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) 
- - def test_subscription_modify_ack_deadline_hit(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI(_modify_ack_deadline_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_miss(self): - from google.cloud.exceptions import NotFound - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_error(self): - from google.gax.errors import GaxError - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_list_snapshots_no_paging(self): - from 
google.gax import INITIAL_PAGE - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Snapshot as SnapshotPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - from google.cloud.pubsub.topic import Topic - - local_snapshot_path = '%s/snapshots/%s' % ( - self.PROJECT_PATH, self.SNAPSHOT_NAME) - snapshot_pb = SnapshotPB( - name=local_snapshot_path, topic=self.TOPIC_PATH) - response = _GAXPageIterator([snapshot_pb]) - gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_snapshots(self.PROJECT) - snapshots = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the snapshot object returned. - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot.topic, Topic) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - - def test_list_snapshots_with_paging(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Snapshot as SnapshotPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - local_snapshot_path = '%s/snapshots/%s' % ( - self.PROJECT_PATH, self.SNAPSHOT_NAME) - snapshot_pb = SnapshotPB(name=local_snapshot_path, topic=self.TOPIC_PATH) - response = _GAXPageIterator([snapshot_pb], page_token=NEW_TOKEN) - gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) - client = 
_Client(self.PROJECT) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_snapshots( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - snapshots = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, NEW_TOKEN) - # Check the snapshot object returned. - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot.topic, Topic) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - - def test_subscription_seek_hit(self): - gax_api = _GAXSubscriberAPI(_seek_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_subscription_seek_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_seek_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_subscription_seek_error(self): - from google.gax.errors import GaxError - - gax_api = 
_GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_create(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Snapshot - - snapshot_pb = Snapshot(name=self.SNAPSHOT_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_snapshot_response=snapshot_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - expected = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_already_exists(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXSubscriberAPI(_create_snapshot_conflict=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_subscrption_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_snapshot_create_subscription_miss=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with 
self.assertRaises(NotFound): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_hit(self): - gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, 
self.SNAPSHOT_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_publisher_api(_Base, unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.pubsub._gax import make_gax_publisher_api - - return make_gax_publisher_api(*args, **kwargs) - - def test_live_api(self): - from google.cloud.pubsub import __version__ - from google.cloud.pubsub._gax import DEFAULT_USER_AGENT - - channels = [] - publisher_api_kwargs = [] - channel_args = [] - channel_obj = object() - mock_result = object() - host = 'foo.apis.invalid' - - def mock_publisher_api(channel, **kwargs): - channels.append(channel) - publisher_api_kwargs.append(kwargs) - return mock_result - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - mock_publisher_api.SERVICE_ADDRESS = host - - creds = _make_credentials() - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - PublisherClient=mock_publisher_api, - make_secure_channel=make_channel) - with patch: - result = self._call_fut(creds) - - self.assertIs(result, mock_result) - self.assertEqual(len(publisher_api_kwargs), 1) - self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - def test_emulator(self): - from google.cloud.pubsub import __version__ - - channels = [] - publisher_api_kwargs = [] - mock_result = object() - insecure_args = [] - mock_channel = object() - - def mock_publisher_api(channel, **kwargs): - channels.append(channel) - publisher_api_kwargs.append(kwargs) - return mock_result - - def mock_insecure_channel(host): - insecure_args.append(host) - return mock_channel - - host = 'CURR_HOST:1234' - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - PublisherClient=mock_publisher_api, - insecure_channel=mock_insecure_channel) - with 
patch: - result = self._call_fut(host=host) - - self.assertIs(result, mock_result) - self.assertEqual(len(publisher_api_kwargs), 1) - self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [mock_channel]) - self.assertEqual(insecure_args, [host]) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_subscriber_api(_Base, unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.pubsub._gax import make_gax_subscriber_api - - return make_gax_subscriber_api(*args, **kwargs) - - def test_live_api(self): - from google.cloud.pubsub import __version__ - from google.cloud.pubsub._gax import DEFAULT_USER_AGENT - - channels = [] - subscriber_api_kwargs = [] - channel_args = [] - channel_obj = object() - mock_result = object() - host = 'foo.apis.invalid' - - def mock_subscriber_api(channel, **kwargs): - channels.append(channel) - subscriber_api_kwargs.append(kwargs) - return mock_result - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - mock_subscriber_api.SERVICE_ADDRESS = host - - creds = _make_credentials() - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - SubscriberClient=mock_subscriber_api, - make_secure_channel=make_channel) - with patch: - result = self._call_fut(creds) - - self.assertIs(result, mock_result) - self.assertEqual(len(subscriber_api_kwargs), 1) - self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - def test_emulator(self): - from google.cloud.pubsub import __version__ - - channels = [] - subscriber_api_kwargs = [] - mock_result = object() - insecure_args = [] - mock_channel = object() - - def mock_subscriber_api(channel, **kwargs): - channels.append(channel) - 
subscriber_api_kwargs.append(kwargs) - return mock_result - - def mock_insecure_channel(host): - insecure_args.append(host) - return mock_channel - - host = 'CURR_HOST:1234' - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - SubscriberClient=mock_subscriber_api, - insecure_channel=mock_insecure_channel) - with patch: - result = self._call_fut(host=host) - - self.assertIs(result, mock_result) - self.assertEqual(len(subscriber_api_kwargs), 1) - self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [mock_channel]) - self.assertEqual(insecure_args, [host]) - - -class _GAXPublisherAPI(_GAXBaseAPI): - - _create_topic_conflict = False - - def list_topics(self, name, page_size, options): - self._list_topics_called_with = name, page_size, options - return self._list_topics_response - - def create_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._create_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if self._create_topic_conflict: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - return self._create_topic_response - - def get_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._get_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_topic_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def delete_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._delete_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if not self._delete_topic_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def publish(self, topic, messages, options=None): - from google.gax.errors import GaxError - - self._publish_called_with = topic, messages, options - if 
self._random_gax_error: - raise GaxError('error') - try: - return self._publish_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def list_topic_subscriptions(self, topic, page_size, options=None): - from google.gax.errors import GaxError - - self._list_topic_subscriptions_called_with = topic, page_size, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._list_topic_subscriptions_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - -class _GAXSubscriberAPI(_GAXBaseAPI): - - _create_snapshot_conflict = False - _create_subscription_conflict = False - _modify_push_config_ok = False - _acknowledge_ok = False - _modify_ack_deadline_ok = False - _deadline_exceeded_gax_error = False - _snapshot_create_subscription_miss=False - - def list_subscriptions(self, project, page_size, options=None): - self._list_subscriptions_called_with = (project, page_size, options) - return self._list_subscriptions_response - - def create_subscription(self, name, topic, push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - options=None): - from google.gax.errors import GaxError - - self._create_subscription_called_with = ( - name, topic, push_config, ack_deadline_seconds, - retain_acked_messages, message_retention_duration, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_subscription_conflict: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - return self._create_subscription_response - - def get_subscription(self, name, options=None): - from google.gax.errors import GaxError - - self._get_subscription_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_subscription_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def delete_subscription(self, name, options=None): - from 
google.gax.errors import GaxError - - self._delete_subscription_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if not self._delete_subscription_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def modify_push_config(self, name, push_config, options=None): - from google.gax.errors import GaxError - - self._modify_push_config_called_with = name, push_config, options - if self._random_gax_error: - raise GaxError('error') - if not self._modify_push_config_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def pull(self, name, max_messages, return_immediately, options=None): - from google.gax.errors import GaxError - - self._pull_called_with = ( - name, max_messages, return_immediately, options) - if self._random_gax_error: - raise GaxError('error') - if self._deadline_exceeded_gax_error: - raise GaxError('deadline exceeded', - self._make_grpc_deadline_exceeded()) - try: - return self._pull_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def acknowledge(self, name, ack_ids, options=None): - from google.gax.errors import GaxError - - self._acknowledge_called_with = name, ack_ids, options - if self._random_gax_error: - raise GaxError('error') - if not self._acknowledge_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def modify_ack_deadline(self, name, ack_ids, deadline, options=None): - from google.gax.errors import GaxError - - self._modify_ack_deadline_called_with = ( - name, ack_ids, deadline, options) - if self._random_gax_error: - raise GaxError('error') - if not self._modify_ack_deadline_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def list_snapshots(self, project, page_size, options=None): - self._list_snapshots_called_with = (project, page_size, options) - return self._list_snapshots_response - - def create_snapshot(self, name, subscription, options=None): - from google.gax.errors import GaxError - - 
self._create_snapshot_called_with = (name, subscription, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_snapshot_conflict: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if self._snapshot_create_subscription_miss: - raise GaxError('miss', self._make_grpc_not_found()) - - return self._create_snapshot_response - - def delete_snapshot(self, snapshot, options=None): - from google.gax.errors import GaxError - - self._delete_snapshot_called_with = (snapshot, options) - if self._random_gax_error: - raise GaxError('error') - if not self._delete_snapshot_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def seek(self, subscription, time=None, snapshot=None, options=None): - from google.gax.errors import GaxError - - self._seek_called_with = (subscription, time, snapshot, options) - if self._random_gax_error: - raise GaxError('error') - if not self._seek_ok: - raise GaxError('miss', self._make_grpc_not_found()) - -class _TopicPB(object): - - def __init__(self, name): - self.name = name - - -class _PublishResponsePB(object): - - def __init__(self, message_ids): - self.message_ids = message_ids - - -class _PubsubMessagePB(object): - - def __init__(self, message_id, data, attributes, publish_time): - self.message_id = message_id - self.data = data - self.attributes = attributes - self.publish_time = publish_time - - -class _ReceivedMessagePB(object): - - def __init__(self, ack_id, message): - self.ack_id = ack_id - self.message = message - - -class _PullResponsePB(object): - - def __init__(self, received_messages): - self.received_messages = received_messages - - -class _Client(object): - - def __init__(self, project): - self.project = project diff --git a/pubsub/tests/unit/test__helpers.py b/pubsub/tests/unit/test__helpers.py deleted file mode 100644 index 0503d68b20b9..000000000000 --- a/pubsub/tests/unit/test__helpers.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2015 Google Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class Test_topic_name_from_path(unittest.TestCase): - - def _call_fut(self, path, project): - from google.cloud.pubsub._helpers import topic_name_from_path - - return topic_name_from_path(path, project) - - def test_w_simple_name(self): - TOPIC_NAME = 'TOPIC_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, TOPIC_NAME) - - def test_w_name_w_all_extras(self): - TOPIC_NAME = 'TOPIC_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, TOPIC_NAME) - - -class Test_subscription_name_from_path(unittest.TestCase): - - def _call_fut(self, path, project): - from google.cloud.pubsub._helpers import subscription_name_from_path - - return subscription_name_from_path(path, project) - - def test_w_simple_name(self): - SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - subscription_name = self._call_fut(PATH, PROJECT) - self.assertEqual(subscription_name, SUBSCRIPTION_NAME) - - def test_w_name_w_all_extras(self): - SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/subscriptions/%s' % (PROJECT, 
SUBSCRIPTION_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, SUBSCRIPTION_NAME) diff --git a/pubsub/tests/unit/test__http.py b/pubsub/tests/unit/test__http.py deleted file mode 100644 index d4bbc29dd6dd..000000000000 --- a/pubsub/tests/unit/test__http.py +++ /dev/null @@ -1,1162 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(unittest.TestCase): - PROJECT = 'PROJECT' - LIST_TOPICS_PATH = 'projects/%s/topics' % (PROJECT,) - LIST_SNAPSHOTS_PATH = 'projects/%s/snapshots' % (PROJECT,) - LIST_SUBSCRIPTIONS_PATH = 'projects/%s/subscriptions' % (PROJECT,) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -class TestConnection(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import Connection - - return Connection - - def test_default_url(self): - conn = self._make_one(object()) - klass = 
self._get_target_class() - self.assertEqual(conn.api_base_url, klass.API_BASE_URL) - - def test_custom_url_from_env(self): - from google.cloud.environment_vars import PUBSUB_EMULATOR - - HOST = 'localhost:8187' - fake_environ = {PUBSUB_EMULATOR: HOST} - - with mock.patch('os.environ', new=fake_environ): - conn = self._make_one(object()) - - klass = self._get_target_class() - self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) - self.assertEqual(conn.api_base_url, 'http://' + HOST) - - def test_build_api_url_no_extra_query_params(self): - conn = self._make_one(object()) - URI = '/'.join([ - conn.API_BASE_URL, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo'), URI) - - def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - - conn = self._make_one(object()) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - self.assertEqual(path, - '/'.join(['', conn.API_VERSION, 'foo'])) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') - - def test_build_api_url_w_base_url_override(self): - base_url1 = 'api-base-url1' - base_url2 = 'api-base-url2' - conn = self._make_one(object()) - conn.api_base_url = base_url1 - URI = '/'.join([ - base_url2, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo', api_base_url=base_url2), - URI) - - def test_extra_headers(self): - from google.cloud import _http as base_http - from google.cloud.pubsub import _http as MUT - - http = mock.Mock(spec=['request']) - response = mock.Mock(status=200, spec=['status']) - data = b'brent-spiner' - http.request.return_value = response, data - client = mock.Mock(_http=http, spec=['_http']) - - conn = self._make_one(client) - req_data = 'req-data-boring' - result = conn.api_request( - 'GET', '/rainbow', data=req_data, 
expect_json=False) - self.assertEqual(result, data) - - expected_headers = { - 'Content-Length': str(len(req_data)), - 'Accept-Encoding': 'gzip', - base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, - 'User-Agent': conn.USER_AGENT, - } - expected_uri = conn.build_api_url('/rainbow') - http.request.assert_called_once_with( - body=req_data, - headers=expected_headers, - method='GET', - uri=expected_uri, - ) - - -class Test_PublisherAPI(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import _PublisherAPI - - return _PublisherAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_topics_no_paging(self): - from google.cloud.pubsub.topic import Topic - - returned = {'topics': [{'name': self.TOPIC_PATH}]} - connection = _Connection(returned) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_topics_with_paging(self): - import six - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - RETURNED = { - 'topics': [{'name': self.TOPIC_PATH}], - 'nextPageToken': 'TOKEN2', - } - connection = _Connection(RETURNED) - client = 
_Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - topics = list(page) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, TOKEN2) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_list_topics_missing_key(self): - returned = {} - connection = _Connection(returned) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_create(self): - RETURNED = {'name': self.TOPIC_PATH} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_create(self.TOPIC_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_create_already_exists(self): - from google.cloud.exceptions import Conflict - - connection = _Connection() - connection._no_response_error = Conflict - client = _Client(connection, self.PROJECT) - api = self._make_one(client) 
- - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_get_hit(self): - RETURNED = {'name': self.TOPIC_PATH} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_get(self.TOPIC_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_get_miss(self): - from google.cloud.exceptions import NotFound - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_get(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_delete_hit(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.topic_delete(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_delete_miss(self): - from google.cloud.exceptions import NotFound - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_delete(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_publish_hit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = 
base64.b64encode(PAYLOAD).decode('ascii') - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} - RETURNED = {'messageIds': [MSGID]} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(resource, [MSGID]) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:publish' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'messages': [B64MSG]}) - msg_data = connection._called_with['data']['messages'][0]['data'] - self.assertEqual(msg_data, B64_PAYLOAD) - - def test_topic_publish_twice(self): - import base64 - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - RETURNED = {'messageIds': []} - connection = _Connection(RETURNED, RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - messages = connection._called_with['data']['messages'] - self.assertEqual(len(messages), 1) - self.assertEqual(messages[0]['data'], B64_PAYLOAD) - - def test_topic_publish_miss(self): - import base64 - from google.cloud.exceptions import NotFound - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:publish' % (self.TOPIC_PATH,) - 
self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'messages': [B64MSG]}) - - def test_topic_list_subscriptions_no_paging(self): - from google.cloud.pubsub.topic import Topic - from google.cloud.pubsub.subscription import Subscription - - local_sub_path = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, self.SUB_NAME) - RETURNED = {'subscriptions': [local_sub_path]} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - local_sub_path = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, self.SUB_NAME) - RETURNED = { - 'subscriptions': [local_sub_path], - 'nextPageToken': TOKEN2, - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions( - topic, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - subscriptions = list(page) - next_token = iterator.next_page_token 
- - self.assertEqual(next_token, TOKEN2) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_topic_list_subscriptions_missing_key(self): - from google.cloud.pubsub.topic import Topic - - connection = _Connection({}) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_list_subscriptions_miss(self): - from google.cloud.exceptions import NotFound - from google.cloud.pubsub.topic import Topic - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - topic = Topic(self.TOPIC_NAME, client) - list(api.topic_list_subscriptions(topic)) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - -class Test_SubscriberAPI(_Base): - - @staticmethod - def _get_target_class(): - from 
google.cloud.pubsub._http import _SubscriberAPI - - return _SubscriberAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_subscriptions_no_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = {'subscriptions': [SUB_INFO]} - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': 'TOKEN2', - } - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_subscriptions( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - subscriptions = list(page) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, TOKEN2) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_list_subscriptions_missing_key(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_subscription_create_defaults(self): - RESOURCE = {'topic': self.TOPIC_PATH} - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def 
test_subscription_create_retain_messages(self): - import datetime - - RESOURCE = {'topic': self.TOPIC_PATH, - 'retainAckedMessages': True, - 'messageRetentionDuration': { - 'seconds': 1729, - 'nanos': 2718 * 1000 - } - } - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, - retain_acked_messages=True, - message_retention_duration=datetime.timedelta( - seconds=1729, microseconds=2718)) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_create_explicit(self): - ACK_DEADLINE = 90 - PUSH_ENDPOINT = 'https://api.example.com/push' - RESOURCE = { - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': { - 'pushEndpoint': PUSH_ENDPOINT, - }, - } - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, - ack_deadline=ACK_DEADLINE, push_endpoint=PUSH_ENDPOINT) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_get(self): - ACK_DEADLINE = 90 - PUSH_ENDPOINT = 'https://api.example.com/push' - RETURNED = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, - } - connection = _Connection(RETURNED) - client = _Client(connection, 
self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_get(self.SUB_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_subscription_delete(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_delete(self.SUB_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_subscription_modify_push_config(self): - PUSH_ENDPOINT = 'https://api.example.com/push' - BODY = { - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_modify_push_config(self.SUB_PATH, PUSH_ENDPOINT) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:modifyPushConfig' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_pull_defaults(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} - RETURNED = { - 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - BODY = { - 'returnImmediately': False, - 'maxMessages': 1, - } - - received = api.subscription_pull(self.SUB_PATH) - - self.assertEqual(received, RETURNED['receivedMessages']) - self.assertEqual(received[0]['message']['data'], PAYLOAD) - 
self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:pull' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_pull_explicit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} - RETURNED = { - 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - MAX_MESSAGES = 10 - BODY = { - 'returnImmediately': True, - 'maxMessages': MAX_MESSAGES, - } - - received = api.subscription_pull( - self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) - - self.assertEqual(received, RETURNED['receivedMessages']) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:pull' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_acknowledge(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - BODY = { - 'ackIds': [ACK_ID1, ACK_ID2], - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:acknowledge' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_modify_ack_deadline(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - BODY = { - 'ackIds': [ACK_ID1, ACK_ID2], - 'ackDeadlineSeconds': NEW_DEADLINE, - } - RETURNED = {} - connection = _Connection(RETURNED) - client = 
_Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:modifyAckDeadline' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_list_snapshots_no_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - local_topic_path = 'projects/%s/topics/%s' % ( - self.PROJECT, self.TOPIC_NAME) - RETURNED = {'snapshots': [{ - 'name': local_snapshot_path, - 'topic': local_topic_path, - }], - } - - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_snapshots(self.PROJECT) - snapshots = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_snapshots_with_paging(self): - import six - - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - local_topic_path = 'projects/%s/topics/%s' % ( - self.PROJECT, self.TOPIC_NAME) - RETURNED = { - 
'snapshots': [{ - 'name': local_snapshot_path, - 'topic': local_topic_path, - }], - 'nextPageToken': TOKEN2, - } - - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_snapshots( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - snapshots = list(page) - next_token = iterator.next_page_token - - self.assertEqual(next_token, TOKEN2) - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_subscription_seek_snapshot(self): - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - RETURNED = {} - BODY = { - 'snapshot': local_snapshot_path - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_seek( - self.SUB_PATH, snapshot=local_snapshot_path) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:seek' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_seek_time(self): - time = '12345' - RETURNED = {} - BODY = { - 'time': time - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_seek(self.SUB_PATH, time=time) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:seek' % (self.SUB_PATH,) - 
self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_create(self): - RETURNED = { - 'name': self.SNAPSHOT_PATH, - 'subscription': self.SUB_PATH - } - BODY = { - 'subscription': self.SUB_PATH - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_create_already_exists(self): - from google.cloud.exceptions import NotFound - - BODY = { - 'subscription': self.SUB_PATH - } - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_delete(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.snapshot_delete(self.SNAPSHOT_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - - -class Test_IAMPolicyAPI(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import _IAMPolicyAPI - - return _IAMPolicyAPI - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, None) - api = self._make_one(client) - self.assertEqual(api.api_request, 
connection.api_request) - - def test_get_iam_policy(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - RETURNED = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - policy = api.get_iam_policy(self.TOPIC_PATH) - - self.assertEqual(policy, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s:getIamPolicy' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_set_iam_policy(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } - RETURNED = POLICY.copy() - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - policy = api.set_iam_policy(self.TOPIC_PATH, POLICY) - - self.assertEqual(policy, RETURNED) - 
self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:setIamPolicy' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'policy': POLICY}) - - def test_test_iam_permissions(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = ALL_ROLES[1:] - RETURNED = {'permissions': ALLOWED} - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) - - self.assertEqual(allowed, ALLOWED) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'permissions': ALL_ROLES}) - - def test_test_iam_permissions_missing_key(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) - - self.assertEqual(allowed, []) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'permissions': ALL_ROLES}) - - -class Test__transform_messages_base64_empty(unittest.TestCase): - def _call_fut(self, messages, transform, key=None): - from google.cloud.pubsub._http import _transform_messages_base64 - - return _transform_messages_base64(messages, 
transform, key) - - def test__transform_messages_base64_empty_message(self): - from base64 import b64decode - - DATA = [{'message': {}}] - self._call_fut(DATA, b64decode, 'message') - self.assertEqual(DATA, [{'message': {}}]) - - def test__transform_messages_base64_empty_data(self): - from base64 import b64decode - - DATA = [{'message': {'data': b''}}] - self._call_fut(DATA, b64decode, 'message') - self.assertEqual(DATA, [{'message': {'data': b''}}]) - - def test__transform_messages_base64_pull(self): - from base64 import b64encode - - DATA = [{'message': {'data': b'testing 1 2 3'}}] - self._call_fut(DATA, b64encode, 'message') - self.assertEqual(DATA[0]['message']['data'], - b64encode(b'testing 1 2 3')) - - def test__transform_messages_base64_publish(self): - from base64 import b64encode - - DATA = [{'data': b'testing 1 2 3'}] - self._call_fut(DATA, b64encode) - self.assertEqual(DATA[0]['data'], b64encode(b'testing 1 2 3')) - - -class _Connection(object): - - _called_with = None - _no_response_error = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - from google.cloud.exceptions import NotFound - - self._called_with = kw - try: - response, self._responses = self._responses[0], self._responses[1:] - except IndexError: - err_class = self._no_response_error or NotFound - raise err_class('miss') - return response - - -class _Client(object): - - def __init__(self, connection, project): - self._connection = connection - self.project = project diff --git a/pubsub/tests/unit/test_client.py b/pubsub/tests/unit/test_client.py deleted file mode 100644 index 407683606330..000000000000 --- a/pubsub/tests/unit/test_client.py +++ /dev/null @@ -1,462 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestClient(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.client import Client - - return Client - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_publisher_api_wo_gax(self): - from google.cloud.pubsub._http import _PublisherAPI - - creds = _make_credentials() - - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=False) - - conn = client._connection = _Connection() - api = client.publisher_api - - self.assertIsInstance(api, _PublisherAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.publisher_api - self.assertIs(again, api) - - def test_no_gax_ctor(self): - from google.cloud.pubsub._http import _PublisherAPI - - creds = _make_credentials() - with mock.patch('google.cloud.pubsub.client._USE_GRPC', - new=True): - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - - self.assertFalse(client._use_grpc) - api = client.publisher_api - self.assertIsInstance(api, _PublisherAPI) - - def _publisher_api_w_gax_helper(self, emulator=False): - from google.cloud.pubsub 
import _http - - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxPublisherAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self._client = client - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=True) - client._connection.in_emulator = emulator - - patch = mock.patch.multiple( - 'google.cloud.pubsub.client', - make_gax_publisher_api=_generated_api, - GAXPublisherAPI=_GaxPublisherAPI) - with patch: - api = client.publisher_api - - self.assertIsInstance(api, _GaxPublisherAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api._client, client) - # API instance is cached - again = client.publisher_api - self.assertIs(again, api) - if emulator: - kwargs = {'host': _http.Connection.API_BASE_URL} - else: - kwargs = {'credentials': creds} - self.assertEqual(_called_with, [((), kwargs)]) - - def test_publisher_api_w_gax(self): - self._publisher_api_w_gax_helper() - - def test_publisher_api_w_gax_and_emulator(self): - self._publisher_api_w_gax_helper(emulator=True) - - def test_subscriber_api_wo_gax(self): - from google.cloud.pubsub._http import _SubscriberAPI - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=False) - - conn = client._connection = _Connection() - api = client.subscriber_api - - self.assertIsInstance(api, _SubscriberAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.subscriber_api - self.assertIs(again, api) - - def _subscriber_api_w_gax_helper(self, emulator=False): - from google.cloud.pubsub import _http - - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxSubscriberAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - 
self._client = client - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=True) - client._connection.in_emulator = emulator - - patch = mock.patch.multiple( - 'google.cloud.pubsub.client', - make_gax_subscriber_api=_generated_api, - GAXSubscriberAPI=_GaxSubscriberAPI) - with patch: - api = client.subscriber_api - - self.assertIsInstance(api, _GaxSubscriberAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api._client, client) - # API instance is cached - again = client.subscriber_api - self.assertIs(again, api) - if emulator: - kwargs = {'host': _http.Connection.API_BASE_URL} - else: - kwargs = {'credentials': creds} - self.assertEqual(_called_with, [((), kwargs)]) - - def test_subscriber_api_w_gax(self): - self._subscriber_api_w_gax_helper() - - def test_subscriber_api_w_gax_and_emulator(self): - self._subscriber_api_w_gax_helper(emulator=True) - - def test_iam_policy_api(self): - from google.cloud.pubsub._http import _IAMPolicyAPI - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = _Connection() - - api = client.iam_policy_api - self.assertIsInstance(api, _IAMPolicyAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.iam_policy_api - self.assertIs(again, api) - - def test_list_topics_no_paging(self): - from google.cloud.pubsub.topic import Topic - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) - client._publisher_api = api - - iterator = client.list_topics() - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - self.assertIsInstance(topics[0], Topic) - self.assertEqual(topics[0].name, self.TOPIC_NAME) - self.assertIsNone(next_page_token) - - 
self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) - - def test_list_topics_with_paging(self): - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, client)], TOKEN2) - client._publisher_api = api - - iterator = client.list_topics(SIZE, TOKEN1) - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - self.assertIsInstance(topics[0], Topic) - self.assertEqual(topics[0].name, self.TOPIC_NAME) - self.assertEqual(next_page_token, TOKEN2) - - self.assertEqual(api._listed_topics, (self.PROJECT, 1, TOKEN1)) - - def test_list_topics_missing_key(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI() - client._publisher_api = api - - iterator = client.list_topics() - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 0) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) - - def test_list_subscriptions_no_paging(self): - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - returned = {'subscriptions': [SUB_INFO]} - client._connection = _Connection(returned) - - iterator = client.list_subscriptions() - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_page_token) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - called_with = client._connection._called_with - expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': expected_path, - 'query_params': {}, - }) - - def test_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - - # Set up the mock response. - ACK_DEADLINE = 42 - PUSH_ENDPOINT = 'https://push.example.com/endpoint' - SUB_INFO = {'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}} - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - returned = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': TOKEN2, - } - client._connection = _Connection(returned) - - iterator = client.list_subscriptions( - SIZE, TOKEN1) - page = six.next(iterator.pages) - subscriptions = list(page) - next_page_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_page_token, TOKEN2) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, ACK_DEADLINE) - self.assertEqual(subscription.push_endpoint, PUSH_ENDPOINT) - - called_with = client._connection._called_with - expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': expected_path, - 'query_params': { - 'pageSize': SIZE, - 'pageToken': TOKEN1, - }, - }) - - def test_list_subscriptions_w_missing_key(self): - PROJECT = 'PROJECT' - creds = _make_credentials() - - client = self._make_one(project=PROJECT, credentials=creds) - client._connection = object() - api = client._subscriber_api = _FauxSubscriberAPI() - api._list_subscriptions_response = (), None - - subscriptions, next_page_token = client.list_subscriptions() - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_subscriptions, - (self.PROJECT, None, None)) - - def test_list_snapshots(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxSubscriberAPI() - response = api._list_snapshots_response = object() - client._subscriber_api = api - self.assertEqual(client.list_snapshots(), response) - self.assertEqual(api._listed_snapshots, (self.PROJECT, None, None)) - - def test_topic_factory(self): - PROJECT = 'PROJECT' - TOPIC_NAME = 'TOPIC_NAME' - creds = _make_credentials() - - client_obj = self._make_one(project=PROJECT, credentials=creds) - new_topic = client_obj.topic(TOPIC_NAME) - self.assertEqual(new_topic.name, TOPIC_NAME) - 
self.assertIs(new_topic._client, client_obj) - self.assertEqual(new_topic.project, PROJECT) - self.assertEqual(new_topic.full_name, - 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)) - self.assertFalse(new_topic.timestamp_messages) - - def test_subscription_factory(self): - project = 'PROJECT' - creds = _make_credentials() - client_obj = self._make_one(project=project, credentials=creds) - - sub_name = 'hoot-n-holler' - ack_deadline = 60, - push_endpoint = 'https://api.example.com/push' - message_retention_duration = datetime.timedelta(3600) - new_subscription = client_obj.subscription( - sub_name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=True, - message_retention_duration=message_retention_duration) - - self.assertEqual(new_subscription.name, sub_name) - self.assertIsNone(new_subscription.topic) - self.assertIs(new_subscription._client, client_obj) - self.assertEqual(new_subscription.project, project) - self.assertEqual(new_subscription.ack_deadline, ack_deadline) - self.assertEqual(new_subscription.push_endpoint, push_endpoint) - self.assertTrue(new_subscription.retain_acked_messages) - self.assertEqual( - new_subscription.message_retention_duration, - message_retention_duration) - - -class _Iterator(object): - - def __init__(self, items, token): - self._items = items or () - self.next_page_token = token - - def __iter__(self): - return iter(self._items) - - -class _FauxPublisherAPI(object): - - def __init__(self, items=None, token=None): - self._items = items - self._token = token - - def list_topics(self, project, page_size, page_token): - self._listed_topics = (project, page_size, page_token) - return _Iterator(self._items, self._token) - - -class _FauxSubscriberAPI(object): - - def list_subscriptions(self, project, page_size, page_token): - self._listed_subscriptions = (project, page_size, page_token) - return self._list_subscriptions_response - - def list_snapshots(self, project, page_size, page_token): - 
self._listed_snapshots = (project, page_size, page_token) - return self._list_snapshots_response - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response diff --git a/pubsub/tests/unit/test_iam.py b/pubsub/tests/unit/test_iam.py deleted file mode 100644 index 475d375d0cd8..000000000000 --- a/pubsub/tests/unit/test_iam.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class TestPolicy(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.iam import Policy - - return Policy - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - empty = frozenset() - policy = self._make_one() - self.assertIsNone(policy.etag) - self.assertIsNone(policy.version) - self.assertEqual(policy.owners, empty) - self.assertEqual(policy.editors, empty) - self.assertEqual(policy.viewers, empty) - self.assertEqual(policy.publishers, empty) - self.assertEqual(policy.subscribers, empty) - - def test_ctor_explicit(self): - VERSION = 17 - ETAG = 'ETAG' - empty = frozenset() - policy = self._make_one(ETAG, VERSION) - self.assertEqual(policy.etag, ETAG) - self.assertEqual(policy.version, VERSION) - self.assertEqual(policy.owners, empty) - self.assertEqual(policy.editors, empty) - self.assertEqual(policy.viewers, empty) - self.assertEqual(policy.publishers, empty) - self.assertEqual(policy.subscribers, empty) - - def test_publishers_setter(self): - import warnings - from google.cloud.pubsub.iam import ( - PUBSUB_PUBLISHER_ROLE, - ) - PUBLISHER = 'user:phred@example.com' - expected = set([PUBLISHER]) - policy = self._make_one() - with warnings.catch_warnings(): - policy.publishers = [PUBLISHER] - - self.assertEqual(policy.publishers, frozenset(expected)) - self.assertEqual( - dict(policy), {PUBSUB_PUBLISHER_ROLE: expected}) - - def test_subscribers_setter(self): - import warnings - from google.cloud.pubsub.iam import ( - PUBSUB_SUBSCRIBER_ROLE, - ) - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - expected = set([SUBSCRIBER]) - policy = self._make_one() - with warnings.catch_warnings(): - policy.subscribers = [SUBSCRIBER] - - self.assertEqual(policy.subscribers, frozenset(expected)) - self.assertEqual( - dict(policy), {PUBSUB_SUBSCRIBER_ROLE: expected}) diff --git a/pubsub/tests/unit/test_message.py 
b/pubsub/tests/unit/test_message.py deleted file mode 100644 index b4f6abfbb1b2..000000000000 --- a/pubsub/tests/unit/test_message.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class TestMessage(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.message import Message - - return Message - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - message = self._make_one(data=DATA, message_id=MESSAGE_ID) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertIsNone(message.service_timestamp) - - def test_ctor_w_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - ATTRS = {'a': 'b'} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, ATTRS) - self.assertIsNone(message.service_timestamp) - - def test_timestamp_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - message = self._make_one(data=DATA, message_id=MESSAGE_ID) - - def _to_fail(): - return message.timestamp - - self.assertRaises(ValueError, _to_fail) - - def 
test_timestamp_wo_timestamp_in_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - ATTRS = {'a': 'b'} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - - def _to_fail(): - return message.timestamp - - self.assertRaises(ValueError, _to_fail) - - def test_timestamp_w_timestamp_in_attributes(self): - from datetime import datetime - from google.cloud._helpers import _RFC3339_MICROS - from google.cloud._helpers import UTC - - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - TIMESTAMP = '2015-04-10T18:42:27.131956Z' - naive = datetime.strptime(TIMESTAMP, _RFC3339_MICROS) - timestamp = naive.replace(tzinfo=UTC) - ATTRS = {'timestamp': TIMESTAMP} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - self.assertEqual(message.timestamp, timestamp) - - def test_from_api_repr_missing_data(self): - MESSAGE_ID = '12345' - api_repr = {'messageId': MESSAGE_ID} - message = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(message.data, b'') - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertIsNone(message.service_timestamp) - - def test_from_api_repr_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = '12345' - TIMESTAMP = '2016-03-18-19:38:22.001393427Z' - api_repr = { - 'data': DATA, - 'messageId': MESSAGE_ID, - 'publishTime': TIMESTAMP, - } - message = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertEqual(message.service_timestamp, TIMESTAMP) - - def test_from_api_repr_w_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = '12345' - ATTRS = {'a': 'b'} - TIMESTAMP = '2016-03-18-19:38:22.001393427Z' - api_repr = { - 'data': DATA, - 'messageId': MESSAGE_ID, - 'publishTime': TIMESTAMP, - 'attributes': ATTRS, - } - message = self._get_target_class().from_api_repr(api_repr) - 
self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.service_timestamp, TIMESTAMP) - self.assertEqual(message.attributes, ATTRS) diff --git a/pubsub/tests/unit/test_snpashot.py b/pubsub/tests/unit/test_snpashot.py deleted file mode 100644 index 5834a1fedd89..000000000000 --- a/pubsub/tests/unit/test_snpashot.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -class TestSnapshot(unittest.TestCase): - PROJECT = 'PROJECT' - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.snapshot import Snapshot - - return Snapshot - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - client = _Client(project=self.PROJECT) - snapshot = self._make_one(self.SNAPSHOT_NAME, - client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) - - def test_ctor_w_subscription(self): - client 
= _Client(project=self.PROJECT) - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, - subscription=subscription) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) - - def test_ctor_error(self): - client = _Client(project=self.PROJECT) - subscription = _Subscription(name=self.SUB_NAME, client=client) - with self.assertRaises(TypeError): - snapshot = self._make_one(self.SNAPSHOT_NAME, - client=client, - subscription=subscription) - - def test_from_api_repr_no_topics(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - klass = self._get_target_class() - snapshot = klass.from_api_repr(resource, client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertIsInstance(snapshot.topic, Topic) - - def test_from_api_repr_w_deleted_topic(self): - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': klass._DELETED_TOPIC_PATH - } - snapshot = klass.from_api_repr(resource, client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertIsNone(snapshot.topic) - - def test_from_api_repr_w_topics_w_no_topic_match(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - 
} - topics = {} - snapshot = klass.from_api_repr(resource, client=client, topics=topics) - topic = snapshot.topic - self.assertIsInstance(topic, Topic) - self.assertIs(topic, topics[self.TOPIC_PATH]) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - - def test_from_api_repr_w_topics_w_topic_match(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - topic = _Topic(self.TOPIC_NAME, client=client) - topics = {self.TOPIC_PATH: topic} - snapshot = klass.from_api_repr(resource, client=client, topics=topics) - self.assertIs(snapshot.topic, topic) - - def test_create_w_bound_client_error(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_response = api._snapshot_create_response = object() - snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) - - with self.assertRaises(RuntimeError): - snapshot.create() - - def test_create_w_bound_subscription(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.create() - - self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) - - def test_create_w_bound_subscription_w_alternate_client(self): - client = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.create(client=client2) - - 
self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) - - snapshot.delete() - - self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) - - def test_delete_w_alternate_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.delete() - - self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - def topic(self, name): - from google.cloud.pubsub.topic import Topic - - return Topic(name, client=self) - - -class _Topic(object): - - def __init__(self, name, client): - self._client = client - - -class _Subscription(object): - - def __init__(self, name, client=None): - self._client = client - self.full_name = 'projects/%s/subscriptions/%s' % ( - client.project, name, ) - - -class _FauxSubscriberAPI(object): - - def snapshot_create(self, snapshot_path, subscription_path): - self._snapshot_created = (snapshot_path, subscription_path, ) - - def snapshot_delete(self, snapshot_path): - self._snapshot_deleted = (snapshot_path, ) - - diff --git a/pubsub/tests/unit/test_subscription.py b/pubsub/tests/unit/test_subscription.py deleted file mode 100644 index ddf0ea439d77..000000000000 --- a/pubsub/tests/unit/test_subscription.py +++ /dev/null @@ -1,957 +0,0 @@ -# Copyright 2015 Google Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -class TestSubscription(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'sub_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - DEADLINE = 42 - ENDPOINT = 'https://api.example.com/push' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.subscription import Subscription - - return Subscription - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - def test_ctor_explicit(self): - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - 
self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_ctor_w_client_wo_topic(self): - client = _Client(project=self.PROJECT) - subscription = self._make_one(self.SUB_NAME, client=client) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsNone(subscription.topic) - - def test_ctor_w_both_topic_and_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client1) - with self.assertRaises(TypeError): - self._make_one(self.SUB_NAME, topic, client=client2) - - def test_ctor_w_neither_topic_nor_client(self): - with self.assertRaises(TypeError): - self._make_one(self.SUB_NAME) - - def test_from_api_repr_no_topics(self): - from google.cloud.pubsub.topic import Topic - - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client) - self.assertEqual(subscription.name, self.SUB_NAME) - topic = subscription.topic - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_deleted_topic(self): - klass = self._get_target_class() - resource = {'topic': klass._DELETED_TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsNone(subscription.topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, 
self.ENDPOINT) - - def test_from_api_repr_w_topics_no_topic_match(self): - from google.cloud.pubsub.topic import Topic - - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - topics = {} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client, topics=topics) - self.assertEqual(subscription.name, self.SUB_NAME) - topic = subscription.topic - self.assertIsInstance(topic, Topic) - self.assertIs(topic, topics[self.TOPIC_PATH]) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_topics_w_topic_match(self): - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - topics = {self.TOPIC_PATH: topic} - klass = self._get_target_class() - subscription = klass.from_api_repr(resource, client, topics=topics) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_full_name_and_path(self): - PROJECT = 'PROJECT' - SUB_FULL = 'projects/%s/subscriptions/%s' % (PROJECT, self.SUB_NAME) - SUB_PATH = '/%s' % (SUB_FULL,) - TOPIC_NAME = 'topic_name' - CLIENT = _Client(project=PROJECT) - topic = _Topic(TOPIC_NAME, client=CLIENT) - subscription = self._make_one(self.SUB_NAME, topic) - self.assertEqual(subscription.full_name, SUB_FULL) - self.assertEqual(subscription.path, SUB_PATH) - - def test_autoack_defaults(self): - from google.cloud.pubsub.subscription import 
AutoAck - - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - auto_ack = subscription.auto_ack() - self.assertIsInstance(auto_ack, AutoAck) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, False) - self.assertEqual(auto_ack._max_messages, 1) - self.assertIsNone(auto_ack._client) - - def test_autoack_explicit(self): - from google.cloud.pubsub.subscription import AutoAck - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - auto_ack = subscription.auto_ack(True, 10, client2) - self.assertIsInstance(auto_ack, AutoAck) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, True) - self.assertEqual(auto_ack._max_messages, 10) - self.assertIs(auto_ack._client, client2) - - def test_create_pull_wo_ack_deadline_w_bound_client(self): - RESPONSE = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_create_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.create() - - self.assertEqual( - api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, None, None, None, None)) - - def test_create_push_w_ack_deadline_w_alternate_client(self): - RESPONSE = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT} - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_create_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - 
subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.create(client=client2) - - self.assertEqual( - api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, self.DEADLINE, self.ENDPOINT, - None, None)) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - self.assertFalse(subscription.exists()) - - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_exists_hit_w_alternate_client(self): - RESPONSE = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - self.assertTrue(subscription.exists(client=client2)) - - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_reload_w_bound_client(self): - RESPONSE = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.reload() - - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_reload_sets_topic(self): - from google.cloud.pubsub.topic import Topic - - response = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': 
self.ENDPOINT}, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = response - subscription = self._make_one(self.SUB_NAME, client=client) - - self.assertIsNone(subscription.topic) - subscription.reload() - - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_got, self.SUB_PATH) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - - def test_reload_w_alternate_client(self): - RESPONSE = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.reload(client=client2) - - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_delete_w_bound_client(self): - RESPONSE = {} - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_delete_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.delete() - - self.assertEqual(api._subscription_deleted, self.SUB_PATH) - - def test_delete_w_alternate_client(self): - RESPONSE = {} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_delete_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - 
subscription.delete(client=client2) - - self.assertEqual(api._subscription_deleted, self.SUB_PATH) - - def test_modify_push_config_w_endpoint_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_push_config_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_push_configuration(push_endpoint=self.ENDPOINT) - - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_modified_push_config, - (self.SUB_PATH, self.ENDPOINT)) - - def test_modify_push_config_wo_endpoint_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_push_config_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - push_endpoint=self.ENDPOINT) - - subscription.modify_push_configuration(push_endpoint=None, - client=client2) - - self.assertIsNone(subscription.push_endpoint) - self.assertEqual(api._subscription_modified_push_config, - (self.SUB_PATH, None)) - - def test_pull_wo_return_immediately_max_messages_w_bound_client(self): - from google.cloud.pubsub.message import Message - - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - PAYLOAD = b'This is the message text' - MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD} - REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = [REC_MESSAGE] - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull() - - self.assertEqual(len(pulled), 1) - ack_id, message = pulled[0] - self.assertEqual(ack_id, ACK_ID) - self.assertIsInstance(message, Message) 
- self.assertEqual(message.data, PAYLOAD) - self.assertEqual(message.message_id, MSG_ID) - self.assertEqual(message.attributes, {}) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, False, 1)) - - def test_pull_w_return_immediately_w_max_messages_w_alt_client(self): - from google.cloud.pubsub.message import Message - - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - PAYLOAD = b'This is the message text' - MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD, - 'attributes': {'a': 'b'}} - REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = [REC_MESSAGE] - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull(return_immediately=True, max_messages=3, - client=client2) - - self.assertEqual(len(pulled), 1) - ack_id, message = pulled[0] - self.assertEqual(ack_id, ACK_ID) - self.assertIsInstance(message, Message) - self.assertEqual(message.data, PAYLOAD) - self.assertEqual(message.message_id, MSG_ID) - self.assertEqual(message.attributes, {'a': 'b'}) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, True, 3)) - - def test_pull_wo_receivedMessages(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull(return_immediately=False) - - self.assertEqual(len(pulled), 0) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, False, 1)) - - def test_acknowledge_w_bound_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_acknowlege_response = {} - topic = 
_Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.acknowledge([ACK_ID1, ACK_ID2]) - - self.assertEqual(api._subscription_acked, - (self.SUB_PATH, [ACK_ID1, ACK_ID2])) - - def test_acknowledge_w_alternate_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_acknowlege_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.acknowledge([ACK_ID1, ACK_ID2], client=client2) - - self.assertEqual(api._subscription_acked, - (self.SUB_PATH, [ACK_ID1, ACK_ID2])) - - def test_modify_ack_deadline_w_bound_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_ack_deadline_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_ack_deadline([ACK_ID1, ACK_ID2], self.DEADLINE) - - self.assertEqual(api._subscription_modified_ack_deadline, - (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) - - def test_modify_ack_deadline_w_alternate_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_ack_deadline_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_ack_deadline( - [ACK_ID1, ACK_ID2], self.DEADLINE, client=client2) - - self.assertEqual(api._subscription_modified_ack_deadline, - (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) - - def test_snapshot(self): - from google.cloud.pubsub.snapshot import Snapshot - - 
client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - snapshot = subscription.snapshot(self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot.topic, topic) - - def test_seek_snapshot_w_bound_client(self): - from google.cloud.pubsub.snapshot import Snapshot - - client = _Client(project=self.PROJECT) - snapshot = Snapshot - snapshot = Snapshot(self.SNAPSHOT_NAME, client=client) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_snapshot(snapshot) - - self.assertEqual(api._subscription_seeked, - (self.SUB_PATH, None, self.SNAPSHOT_PATH)) - - def test_seek_snapshot_w_alternate_client(self): - from google.cloud.pubsub.snapshot import Snapshot - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - snapshot = Snapshot(self.SNAPSHOT_NAME, client=client1) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_snapshot(snapshot, client=client2) - - self.assertEqual(api._subscription_seeked, - (self.SUB_PATH, None, self.SNAPSHOT_PATH)) - - def test_seek_time_w_bound_client(self): - import datetime - - from google.cloud import _helpers - - time = datetime.time() - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_timestamp(time) - - self.assertEqual( - api._subscription_seeked, - (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), 
None)) - - def test_seek_time_w_alternate_client(self): - import datetime - - from google.cloud import _helpers - - time = datetime.time() - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_timestamp(time, client=client2) - - self.assertEqual( - api._subscription_seeked, - (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), None)) - - def test_get_iam_policy_w_bound_client(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = subscription.get_iam_policy() - - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), 
[EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._got_iam_policy, self.SUB_PATH) - - def test_get_iam_policy_w_alternate_client(self): - POLICY = { - 'etag': 'ACAB', - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = subscription.get_iam_policy(client=client2) - - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.editors), []) - self.assertEqual(sorted(policy.viewers), []) - - self.assertEqual(api._got_iam_policy, self.SUB_PATH) - - def test_set_iam_policy_w_bound_client(self): - import operator - from google.cloud.pubsub.iam import Policy - from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - RESPONSE = POLICY.copy() - RESPONSE['etag'] = 'ABACABAF' - 
RESPONSE['version'] = 18 - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - policy = Policy('DEADBEEF', 17) - policy.owners = [OWNER1, OWNER2] - policy.editors = [EDITOR1, EDITOR2] - policy.viewers = [VIEWER1, VIEWER2] - policy.publishers = [PUBLISHER] - policy.subscribers = [SUBSCRIBER] - - new_policy = subscription.set_iam_policy(policy) - - self.assertEqual(new_policy.etag, 'ABACABAF') - self.assertEqual(new_policy.version, 18) - self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(len(api._set_iam_policy), 2) - self.assertEqual(api._set_iam_policy[0], self.SUB_PATH) - resource = api._set_iam_policy[1] - self.assertEqual(resource['etag'], POLICY['etag']) - self.assertEqual(resource['version'], POLICY['version']) - key = operator.itemgetter('role') - self.assertEqual( - sorted(resource['bindings'], key=key), - sorted(POLICY['bindings'], key=key)) - - def test_set_iam_policy_w_alternate_client(self): - from google.cloud.pubsub.iam import Policy - - RESPONSE = {'etag': 'ACAB'} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = Policy() - new_policy = subscription.set_iam_policy(policy, client=client2) - - self.assertEqual(new_policy.etag, 'ACAB') - self.assertIsNone(new_policy.version) - self.assertEqual(sorted(new_policy.owners), []) - 
self.assertEqual(sorted(new_policy.editors), []) - self.assertEqual(sorted(new_policy.viewers), []) - self.assertEqual(api._set_iam_policy, (self.SUB_PATH, {})) - - def test_check_iam_permissions_w_bound_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = ROLES[:-1] - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - allowed = subscription.check_iam_permissions(ROLES) - - self.assertEqual(allowed, ROLES[:-1]) - self.assertEqual(api._tested_iam_permissions, - (self.SUB_PATH, ROLES)) - - def test_check_iam_permissions_w_alternate_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = [] - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - allowed = subscription.check_iam_permissions(ROLES, client=client2) - - self.assertEqual(len(allowed), 0) - self.assertEqual(api._tested_iam_permissions, - (self.SUB_PATH, ROLES)) - - -class _FauxSubscribererAPI(object): - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - self._subscription_created = ( - subscription_path, topic_path, ack_deadline, push_endpoint, - retain_acked_messages, message_retention_duration) - return self._subscription_create_response - - def subscription_get(self, 
subscription_path): - from google.cloud.exceptions import NotFound - - self._subscription_got = subscription_path - try: - return self._subscription_get_response - except AttributeError: - raise NotFound(subscription_path) - - def subscription_delete(self, subscription_path): - self._subscription_deleted = subscription_path - return self._subscription_delete_response - - def subscription_modify_push_config( - self, subscription_path, push_endpoint): - self._subscription_modified_push_config = ( - subscription_path, push_endpoint) - return self._subscription_modify_push_config_response - - def subscription_pull(self, subscription_path, return_immediately, - max_messages): - self._subscription_pulled = ( - subscription_path, return_immediately, max_messages) - return self._subscription_pull_response - - def subscription_acknowledge(self, subscription_path, ack_ids): - self._subscription_acked = (subscription_path, ack_ids) - return self._subscription_acknowlege_response - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - self._subscription_modified_ack_deadline = ( - subscription_path, ack_ids, ack_deadline) - return self._subscription_modify_ack_deadline_response - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - self._subscription_seeked = ( - subscription_path, time, snapshot) - return self._subscription_seek_response - - -class TestAutoAck(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.subscription import AutoAck - - return AutoAck - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - subscription = _FauxSubscription(()) - auto_ack = self._make_one(subscription) - self.assertEqual(auto_ack._return_immediately, False) - self.assertEqual(auto_ack._max_messages, 1) - self.assertIsNone(auto_ack._client) - - def test_ctor_explicit(self): - CLIENT = object() - subscription = 
_FauxSubscription(()) - auto_ack = self._make_one( - subscription, return_immediately=True, max_messages=10, - client=CLIENT) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, True) - self.assertEqual(auto_ack._max_messages, 10) - self.assertIs(auto_ack._client, CLIENT) - - def test___enter___w_defaults(self): - subscription = _FauxSubscription(()) - auto_ack = self._make_one(subscription) - - with auto_ack as returned: - pass - - self.assertIs(returned, auto_ack) - self.assertEqual(subscription._return_immediately, False) - self.assertEqual(subscription._max_messages, 1) - self.assertIsNone(subscription._client) - - def test___enter___w_explicit(self): - CLIENT = object() - subscription = _FauxSubscription(()) - auto_ack = self._make_one( - subscription, return_immediately=True, max_messages=10, - client=CLIENT) - - with auto_ack as returned: - pass - - self.assertIs(returned, auto_ack) - self.assertEqual(subscription._return_immediately, True) - self.assertEqual(subscription._max_messages, 10) - self.assertIs(subscription._client, CLIENT) - - def test___exit___(self): - CLIENT = object() - ACK_ID1, MESSAGE1 = 'ACK_ID1', _FallibleMessage() - ACK_ID2, MESSAGE2 = 'ACK_ID2', _FallibleMessage() - ACK_ID3, MESSAGE3 = 'ACK_ID3', _FallibleMessage(True) - ITEMS = [ - (ACK_ID1, MESSAGE1), - (ACK_ID2, MESSAGE2), - (ACK_ID3, MESSAGE3), - ] - subscription = _FauxSubscription(ITEMS) - auto_ack = self._make_one(subscription, client=CLIENT) - with auto_ack: - for ack_id, message in list(auto_ack.items()): - if message.fail: - del auto_ack[ack_id] - self.assertEqual(sorted(subscription._acknowledged), - [ACK_ID1, ACK_ID2]) - self.assertIs(subscription._ack_client, CLIENT) - - def test_empty_ack_no_acknowledge(self): - subscription = mock.Mock(_FauxSubscription) - subscription.pull = lambda *args: [] - - auto_ack = self._make_one(subscription) - with auto_ack: - pass - - subscription.acknowledge.assert_not_called() - - -class 
_FauxIAMPolicy(object): - - def get_iam_policy(self, target_path): - self._got_iam_policy = target_path - return self._get_iam_policy_response - - def set_iam_policy(self, target_path, policy): - self._set_iam_policy = target_path, policy - return self._set_iam_policy_response - - def test_iam_permissions(self, target_path, permissions): - self._tested_iam_permissions = target_path, permissions - return self._test_iam_permissions_response - - -class _Topic(object): - - def __init__(self, name, client): - self.name = name - self._client = client - self.project = client.project - self.full_name = 'projects/%s/topics/%s' % (client.project, name) - self.path = '/projects/%s/topics/%s' % (client.project, name) - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - def topic(self, name, timestamp_messages=False): - from google.cloud.pubsub.topic import Topic - - return Topic(name, client=self, timestamp_messages=timestamp_messages) - - -class _FallibleMessage(object): - - def __init__(self, fail=False): - self.fail = fail - - -class _FauxSubscription(object): - - def __init__(self, items): - self._items = items - self._mapping = dict(items) - self._acknowledged = set() - - def pull(self, return_immediately=False, max_messages=1, client=None): - self._return_immediately = return_immediately - self._max_messages = max_messages - self._client = client - return self._items - - def acknowledge(self, ack_ids, client=None): - self._ack_client = client - for ack_id in ack_ids: - message = self._mapping[ack_id] - assert not message.fail - self._acknowledged.add(ack_id) diff --git a/pubsub/tests/unit/test_topic.py b/pubsub/tests/unit/test_topic.py deleted file mode 100644 index 2c90432195c2..000000000000 --- a/pubsub/tests/unit/test_topic.py +++ /dev/null @@ -1,974 +0,0 @@ -# Copyright 2015 Google Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestTopic(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.topic import Topic - - return Topic - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_w_explicit_timestamp(self): - client = _Client(project=self.PROJECT) - topic = self._make_one(self.TOPIC_NAME, - client=client, - timestamp_messages=True) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertTrue(topic.timestamp_messages) - - def test_from_api_repr(self): - client = _Client(project=self.PROJECT) - resource = {'name': self.TOPIC_PATH} - klass = self._get_target_class() - topic = klass.from_api_repr(resource, client=client) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertIs(topic._client, client) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - - def test_from_api_repr_with_bad_client(self): - PROJECT1 = 'PROJECT1' - PROJECT2 = 'PROJECT2' - client = _Client(project=PROJECT1) - PATH = 'projects/%s/topics/%s' % (PROJECT2, 
self.TOPIC_NAME) - resource = {'name': PATH} - klass = self._get_target_class() - self.assertRaises(ValueError, klass.from_api_repr, - resource, client=client) - - def test_create_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client) - - topic.create() - - self.assertEqual(api._topic_created, self.TOPIC_PATH) - - def test_create_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - topic.create(client=client2) - - self.assertEqual(api._topic_created, self.TOPIC_PATH) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - topic = self._make_one(self.TOPIC_NAME, client=client) - - self.assertFalse(topic.exists()) - - self.assertEqual(api._topic_got, self.TOPIC_PATH) - - def test_exists_hit_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_get_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - self.assertTrue(topic.exists(client=client2)) - - self.assertEqual(api._topic_got, self.TOPIC_PATH) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_delete_response = {} - topic = self._make_one(self.TOPIC_NAME, client=client) - - topic.delete() - - self.assertEqual(api._topic_deleted, self.TOPIC_PATH) - - def test_delete_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api 
= client2.publisher_api = _FauxPublisherAPI() - api._topic_delete_response = {} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - topic.delete(client=client2) - - self.assertEqual(api._topic_deleted, self.TOPIC_PATH) - - def test_publish_single_bytes_wo_attrs_w_bound_client(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): - import datetime - from google.cloud._helpers import _RFC3339_MICROS - - NOW = datetime.datetime.utcnow() - - def _utcnow(): - return NOW - - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = { - 'data': PAYLOAD, - 'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)}, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - - topic = self._make_one(self.TOPIC_NAME, client=client1, - timestamp_messages=True) - with mock.patch('google.cloud.pubsub.topic._NOW', new=_utcnow): - msgid = topic.publish(PAYLOAD, client=client2) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - OVERRIDE = '2015-04-10T16:46:22.868399Z' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'timestamp': OVERRIDE}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = 
self._make_one(self.TOPIC_NAME, client=client, - timestamp_messages=True) - - msgid = topic.publish(PAYLOAD, timestamp=OVERRIDE) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_w_attrs(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - - msgid = topic.publish(PAYLOAD, attr1='value1', attr2='value2') - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_with_gax(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_without_gax(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_multiple_w_bound_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': 
PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = self._make_one(self.TOPIC_NAME, client=client) - - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) - - def test_publish_w_no_messages(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [] - topic = self._make_one(self.TOPIC_NAME, client=client) - - with topic.batch() as batch: - pass - - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._api_called, 0) - - def test_publish_multiple_w_alternate_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = { - 'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = self._make_one(self.TOPIC_NAME, client=client1) - - with topic.batch(client=client2) as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) - - def test_publish_multiple_error(self): - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - client = _Client(project=self.PROJECT) - api = 
client.publisher_api = _FauxPublisherAPI() - topic = self._make_one(self.TOPIC_NAME, client=client) - - try: - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - raise _Bugout() - except _Bugout: - pass - - self.assertEqual(list(batch), []) - self.assertEqual(getattr(api, '_topic_published', self), self) - - def test_subscription(self): - from google.cloud.pubsub.subscription import Subscription - - client = _Client(project=self.PROJECT) - topic = self._make_one(self.TOPIC_NAME, client=client) - - SUBSCRIPTION_NAME = 'subscription_name' - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertIs(subscription.topic, topic) - - def test_list_subscriptions_no_paging(self): - import six - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - - SUB_NAME_1 = 'subscription_1' - SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_1) - SUB_NAME_2 = 'subscription_2' - SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_2) - SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] - TOKEN = 'TOKEN' - - returned = { - 'subscriptions': SUBS_LIST, - 'nextPageToken': TOKEN, - } - client._connection = _Connection(returned) - - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions() - page = six.next(iterator.pages) - subscriptions = list(page) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 2) - - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[0].name, SUB_NAME_1) - self.assertIs(subscription.topic, topic) - - subscription = subscriptions[1] - self.assertIsInstance(subscription, 
Subscription) - self.assertEqual(subscriptions[1].name, SUB_NAME_2) - self.assertIs(subscription.topic, topic) - - self.assertEqual(next_page_token, TOKEN) - # Verify the mock. - called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], {}) - - def test_list_subscriptions_with_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - - SUB_NAME_1 = 'subscription_1' - SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_1) - SUB_NAME_2 = 'subscription_2' - SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_2) - SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] - PAGE_SIZE = 10 - TOKEN = 'TOKEN' - - returned = { - 'subscriptions': SUBS_LIST, - } - client._connection = _Connection(returned) - - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions( - page_size=PAGE_SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 2) - - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[0].name, SUB_NAME_1) - self.assertIs(subscription.topic, topic) - - subscription = subscriptions[1] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[1].name, SUB_NAME_2) - self.assertIs(subscription.topic, topic) - - self.assertIsNone(next_page_token) - # Verify the mock. 
- called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], - {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) - - def test_list_subscriptions_missing_key(self): - from google.cloud.pubsub.client import Client - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - client._connection = _Connection({}) - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions() - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_page_token) - # Verify the mock. - called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], {}) - - def test_get_iam_policy_w_bound_client(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - 
{'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = self._make_one(self.TOPIC_NAME, client=client) - - policy = topic.get_iam_policy() - - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) - - def test_get_iam_policy_w_alternate_client(self): - POLICY = { - 'etag': 'ACAB', - } - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = self._make_one(self.TOPIC_NAME, client=client1) - - policy = topic.get_iam_policy(client=client2) - - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.editors), []) - self.assertEqual(sorted(policy.viewers), []) - - self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) - - def test_set_iam_policy_w_bound_client(self): - import operator - from google.cloud.pubsub.iam import Policy - from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - 
POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, - 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, - 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, - 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, - 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, - 'members': [SUBSCRIBER]}, - ], - } - RESPONSE = POLICY.copy() - RESPONSE['etag'] = 'ABACABAF' - RESPONSE['version'] = 18 - - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = self._make_one(self.TOPIC_NAME, client=client) - policy = Policy('DEADBEEF', 17) - policy.owners = [OWNER1, OWNER2] - policy.editors = [EDITOR1, EDITOR2] - policy.viewers = [VIEWER1, VIEWER2] - policy.publishers = [PUBLISHER] - policy.subscribers = [SUBSCRIBER] - - new_policy = topic.set_iam_policy(policy) - - self.assertEqual(new_policy.etag, 'ABACABAF') - self.assertEqual(new_policy.version, 18) - self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(len(api._set_iam_policy), 2) - self.assertEqual(api._set_iam_policy[0], self.TOPIC_PATH) - resource = api._set_iam_policy[1] - self.assertEqual(resource['etag'], POLICY['etag']) - self.assertEqual(resource['version'], POLICY['version']) - key = operator.itemgetter('role') - self.assertEqual( - sorted(resource['bindings'], key=key), - sorted(POLICY['bindings'], key=key)) - - def test_set_iam_policy_w_alternate_client(self): - from google.cloud.pubsub.iam import Policy - - RESPONSE = {'etag': 'ACAB'} - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - 
api._set_iam_policy_response = RESPONSE - topic = self._make_one(self.TOPIC_NAME, client=client1) - - policy = Policy() - new_policy = topic.set_iam_policy(policy, client=client2) - - self.assertEqual(new_policy.etag, 'ACAB') - self.assertIsNone(new_policy.version) - self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.editors), []) - self.assertEqual(sorted(new_policy.viewers), []) - - self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, {})) - - def test_check_iam_permissions_w_bound_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = ROLES[:-1] - topic = self._make_one(self.TOPIC_NAME, client=client) - - allowed = topic.check_iam_permissions(ROLES) - - self.assertEqual(allowed, ROLES[:-1]) - self.assertEqual(api._tested_iam_permissions, - (self.TOPIC_PATH, ROLES)) - - def test_check_iam_permissions_w_alternate_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = [] - topic = self._make_one(self.TOPIC_NAME, client=client1) - - allowed = topic.check_iam_permissions(ROLES, client=client2) - - self.assertEqual(len(allowed), 0) - self.assertEqual(api._tested_iam_permissions, - (self.TOPIC_PATH, ROLES)) - - -class TestBatch(unittest.TestCase): - PROJECT = 'PROJECT' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.topic import Batch - - return Batch - - def _make_one(self, *args, **kwargs): - return 
self._get_target_class()(*args, **kwargs) - - def test_ctor_defaults(self): - topic = _Topic() - client = _Client(project=self.PROJECT) - batch = self._make_one(topic, client) - self.assertIs(batch.topic, topic) - self.assertIs(batch.client, client) - self.assertEqual(len(batch.messages), 0) - self.assertEqual(len(batch.message_ids), 0) - - def test___iter___empty(self): - topic = _Topic() - client = object() - batch = self._make_one(topic, client) - self.assertEqual(list(batch), []) - - def test___iter___non_empty(self): - topic = _Topic() - client = object() - batch = self._make_one(topic, client) - batch.message_ids[:] = ['ONE', 'TWO', 'THREE'] - self.assertEqual(list(batch), ['ONE', 'TWO', 'THREE']) - - def test_publish_bytes_wo_attrs(self): - PAYLOAD = 'This is the message text' - MESSAGE = {'data': PAYLOAD, - 'attributes': {}} - client = _Client(project=self.PROJECT) - topic = _Topic() - batch = self._make_one(topic, client=client) - batch.publish(PAYLOAD) - self.assertEqual(batch.messages, [MESSAGE]) - - def test_publish_bytes_w_add_timestamp(self): - PAYLOAD = 'This is the message text' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'timestamp': 'TIMESTAMP'}} - client = _Client(project=self.PROJECT) - topic = _Topic(timestamp_messages=True) - batch = self._make_one(topic, client=client) - batch.publish(PAYLOAD) - self.assertEqual(batch.messages, [MESSAGE]) - - def test_commit_w_bound_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, - 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client) - - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - 
batch.commit() - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_commit_w_alternate_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client1 = _Client(project='PROJECT') - client2 = _Client(project='PROJECT') - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client1) - - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - batch.commit(client=client2) - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_context_mgr_success(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client) - - with batch as other: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertIs(other, batch) - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_context_mgr_failure(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 
'This is the second message text' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - topic = _Topic() - batch = self._make_one(topic, client=client) - - try: - with batch as other: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - raise _Bugout() - except _Bugout: - pass - - self.assertIs(other, batch) - self.assertEqual(list(batch), []) - self.assertEqual(list(batch.messages), [MESSAGE1, MESSAGE2]) - self.assertEqual(getattr(api, '_topic_published', self), self) - - def test_batch_messages(self): - # Establish that a batch actually batches messsages in the expected - # way. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client) as batch: - self.assertIsInstance(batch, Batch) - - # Publish four messages and establish that the batch does - # not commit. - for i in range(0, 4): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Check the contents of the batch. - self.assertEqual(batch.messages, [ - {'data': 'Batch message 0.', 'attributes': {}}, - {'data': 'Batch message 1.', 'attributes': {}}, - {'data': 'Batch message 2.', 'attributes': {}}, - {'data': 'Batch message 3.', 'attributes': {}}, - ]) - - def test_message_count_autocommit(self): - # Establish that if the batch is assigned to take a maximum - # number of messages, that it commits when it reaches that maximum. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. 
- Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client, max_messages=5) as batch: - self.assertIsInstance(batch, Batch) - - # Publish four messages and establish that the batch does - # not commit. - for i in range(0, 4): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Publish a fifth message and observe the commit. - batch.publish('The final call to trigger a commit!') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - @mock.patch('time.time') - def test_message_time_autocommit(self, mock_time): - # Establish that if the batch is sufficiently old, that it commits - # the next time it receives a publish. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - mock_time.return_value = 0.0 - with self._make_one(topic, client=client, max_interval=5) as batch: - self.assertIsInstance(batch, Batch) - - # Publish some messages and establish that the batch does - # not commit. - for i in range(0, 10): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Move time ahead so that this batch is too old. - mock_time.return_value = 10.0 - - # Publish another message and observe the commit. - batch.publish('The final call to trigger a commit!') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - def test_message_size_autocommit(self): - # Establish that if the batch is sufficiently large, that it - # auto-commits. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. 
- Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client, max_size=100) as batch: - self.assertIsInstance(batch, Batch) - - # Publish a short (< 100 bytes) message and establish that - # the batch does not commit. - batch.publish(b'foo') - commit.assert_not_called() - - # Publish another message and observe the commit. - batch.publish(u'The final call to trigger a commit, because ' - u'this message is sufficiently long.') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - -class _FauxPublisherAPI(object): - _api_called = 0 - - def topic_create(self, topic_path): - self._topic_created = topic_path - return self._topic_create_response - - def topic_get(self, topic_path): - from google.cloud.exceptions import NotFound - - self._topic_got = topic_path - try: - return self._topic_get_response - except AttributeError: - raise NotFound(topic_path) - - def topic_delete(self, topic_path): - self._topic_deleted = topic_path - return self._topic_delete_response - - def topic_publish(self, topic_path, messages): - self._topic_published = topic_path, messages - self._api_called += 1 - return self._topic_publish_response - - -class _FauxIAMPolicy(object): - - def get_iam_policy(self, target_path): - self._got_iam_policy = target_path - return self._get_iam_policy_response - - def set_iam_policy(self, target_path, policy): - self._set_iam_policy = target_path, policy - return self._set_iam_policy_response - - def test_iam_permissions(self, target_path, permissions): - self._tested_iam_permissions = target_path, permissions - return self._test_iam_permissions_response - - -class _Topic(object): - - def __init__(self, name="NAME", project="PROJECT", - timestamp_messages=False): - self.full_name = 'projects/%s/topics/%s' % (project, name) - self.path = '/%s' % (self.full_name,) - self.timestamp_messages = 
timestamp_messages - - def _timestamp_message(self, attrs): - if self.timestamp_messages: - attrs['timestamp'] = 'TIMESTAMP' - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - -class _Bugout(Exception): - pass - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response From c8a9fd678bd7a9ebbea2b1bd0e65cdf30160ce94 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 25 May 2017 08:45:44 -0700 Subject: [PATCH 02/86] stuff --- .../google/cloud/pubsub_v1/publisher/batch.py | 41 +++++- .../cloud/pubsub_v1/publisher/client.py | 16 +-- .../cloud/pubsub_v1/publisher/future.py | 129 ++++++++++++++++++ 3 files changed, 165 insertions(+), 21 deletions(-) create mode 100644 pubsub/google/cloud/pubsub_v1/publisher/future.py diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index cd5eee108c26..821dfd5fbbfd 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -15,10 +15,13 @@ from __future__ import absolute_import import collections +import queue import time +from google.cloud.pubsub_v1.publisher import future -Message = collections.namedtuple('Message', ['data', 'attrs']) + +Message = collections.namedtuple('Message', ['data', 'attrs', '_client_id']) class Batch(object): @@ -49,13 +52,33 @@ class Batch(object): def __init__(self, client, settings): self._client = client self._settings = settings - self._messages = self._client._queue_class() + self._messages = queue.Queue() self._status = 'accepting messages' # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. 
self._client.thread_class(self.monitor) + @property + def client(self): + """Return the client that created this batch. + + Returns: + :class:~`pubsub_v1.client.Client`: The client that created this + batch. + """ + return self._client + + @property + def status(self): + """Return the status of this batch. + + Returns: + str: The status of this batch. All statuses are human-readable, + all-lowercase strings. + """ + return self._status + def commit(self): """Actually publish all of the messages on the active batch. @@ -67,9 +90,7 @@ def commit(self): if self._client._batch is self: self._client._batch = None - # Add this to the set of in-flight batches, to ensure we are holding - # a reference. - self._client._in_flight_batches.add(self) + # Begin the request to publish these messages. def monitor(self): """Commit this batch after sufficient time has elapsed. @@ -141,4 +162,12 @@ def publish(self, data, **attrs): 'be sent as text strings.') # Add the message to the batch. - self._messages.put(Message(data=data, attrs=attrs)) + # + # We add an internal ID (note: a client-side ID, *not* the Pub/Sub + # ID) so we can track the message later. + _id = six.text_type(uuid.uuid4()) + self._messages.put(Message(data=data, attrs=attrs, client_id=_id)) + + # Return a Future. That future needs to be aware of the status + # of this batch. + return future.Future(self) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index f2a904489102..e5d4d1b130d2 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -42,9 +42,6 @@ class PublisherClient(object): thread_class (class): Any class that is duck-type compatible with :class:`threading.Thread`. The default is :class:`multiprocessing.Process` - queue_class (class): Any class that is duck-type compatible with - :class:`queue.Queue`. - The default is :class:`multiprocessing.Queue`. 
kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying :class:`~gapic.pubsub.v1.publisher_client.PublisherClient`. @@ -60,9 +57,8 @@ def __init__(self, batching=(), thread_class=multiprocessing.Process, self.api = publisher_client.PublisherClient(*args, **kwargs) self.batching = types.Batching(batching) - # Set the thread and queue classes. + # Set the thread class. self._thread_class = thread_class - self._queue_class = queue_class # The batch on the publisher client is responsible for holding # messages. @@ -70,7 +66,6 @@ def __init__(self, batching=(), thread_class=multiprocessing.Process, # We set this to None for now; the first message that is published # will create it (in order to ensure that the start time is correct). self._batch = None - self._in_flight_batches = set() @property def batch(self): @@ -85,15 +80,6 @@ def batch(self): self_batch = Batch(client=self, settings=self.batching) return self._batch - @property - def queue_class(self): - """Return the queue class provided at instantiation. - - Returns: - class: A class duck-type compatible with :class:`queue.Queue`. - """ - return self._queue_class - @property def thread_class(self): """Return the thread class provided at instantiation. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py new file mode 100644 index 000000000000..7fcc6dc59f2e --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -0,0 +1,129 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import queue + + +class Future(object): + """Encapsulation of the asynchronous execution of an action. + + This object is returned from asychronous Pub/Sub calls, and is the + interface to determine the status of those calls. + + This object should not be created directly, but is returned by other + methods in this library. + + Args: + batch (:class:~`pubsub_v1.batch.Batch`): The batch object that + is committing this message. + """ + def __init__(self, batch): + self._batch = batch + self._callbacks = queue.Queue() + + def cancel(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns False. + """ + return False + + def cancelled(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns False. + """ + return False + + def running(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns True. + """ + return True + + def done(self): + """Return True if the publish has completed, False otherwise. + + This still returns True in failure cases; checking `result` or + `exception` is the canonical way to assess success or failure. + """ + return self.batch.status in ('done', 'error') + + def result(self, timeout=None): + """Return the message ID, or raise an exception. + + This blocks until the message has successfully been published, and + returns the message ID. + + Args: + timeout (int|float): The number of seconds before this call + times out and raises TimeoutError. 
+
+        Raises:
+            :class:~`pubsub_v1.TimeoutError`: If the request times out.
+            :class:~`Exception`: For undefined exceptions in the underlying
+                call execution.
+        """
+
+    def exception(self, timeout=None):
+        """Return the exception raised by the call, if any.
+
+        This blocks until the message has successfully been published, and
+        returns the exception. If the call succeeded, return None.
+
+        Args:
+            timeout (int|float): The number of seconds before this call
+                times out and raises TimeoutError.
+
+        Raises:
+            :class:~`pubsub_v1.TimeoutError`: If the request times out.
+
+        Returns:
+            :class:`Exception`: The exception raised by the call, if any.
+        """
+
+    def add_done_callback(self, fn):
+        """Attach the provided callable to the future.
+
+        The provided function is called, with this future as its only argument,
+        when the future finishes running.
+        """
+        if self.done():
+            fn(self)
+        self._callbacks.put(fn)
+
+    def _trigger(self):
+        """Trigger all callbacks registered to this Future.
+
+        This method is called internally by the batch once the batch
+        completes.
+        """
+        try:
+            while True:
+                callback = self._callbacks.get(block=False)
+                callback(self)
+        except queue.Empty:
+            return None
+
+
+class TimeoutError(Exception):
+    """Exception subclass for timeout-related errors.
+
+    This exception is only returned by the :class:~`pubsub_v1.future.Future`
+    class.
+ """ + pass From 9b09b8fbb0c4b7cc60ff5b26d093fcb00bd5416a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 25 May 2017 11:27:12 -0700 Subject: [PATCH 03/86] WIP --- .../cloud/pubsub_v1/publisher/client.py | 80 ++++++++++++++----- .../cloud/pubsub_v1/publisher/future.py | 36 ++++++++- 2 files changed, 92 insertions(+), 24 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index e5d4d1b130d2..51b4fdc5c430 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -60,15 +60,20 @@ def __init__(self, batching=(), thread_class=multiprocessing.Process, # Set the thread class. self._thread_class = thread_class - # The batch on the publisher client is responsible for holding - # messages. - # - # We set this to None for now; the first message that is published - # will create it (in order to ensure that the start time is correct). - self._batch = None + # The batches on the publisher client are responsible for holding + # messages. One batch exists for each topic. + self._batches = {} @property - def batch(self): + def thread_class(self): + """Return the thread class provided at instantiation. + + Returns: + class: A class duck-type compatible with :class:`threading.Thread`. + """ + return self._thread_class + + def batch(self, topic): """Return the current batch. This will create a new batch if no batch currently exists. @@ -76,19 +81,54 @@ def batch(self): Returns: :class:~`pubsub_v1.batch.Batch` The batch object. """ - if self._batch is None: - self_batch = Batch(client=self, settings=self.batching) - return self._batch - - @property - def thread_class(self): - """Return the thread class provided at instantiation. 
+        if topic not in self._batch:
+            self._batch[topic] = Batch(
+                client=self,
+                settings=self.batching,
+                topic=topic,
+            )
+        return self._batch[topic]
+
+    def publish(self, topic, data, **attrs):
+        """Publish a single message.
+
+        .. note::
+            Messages in Pub/Sub are blobs of bytes. They are *binary* data,
+            not text. You must send data as a bytestring
+            (``bytes`` in Python 3; ``str`` in Python 2), and this library
+            will raise an exception if you send a text string.
+
+            The reason that this is so important (and why we do not try to
+            coerce for you) is because Pub/Sub is also platform independent
+            and there is no way to know how to decode messages properly on
+            the other side; therefore, encoding and decoding is a required
+            exercise for the developer.
+
+        Add the given message to this object; this will cause it to be
+        published once the batch either has enough messages or a sufficient
+        period of time has elapsed.
+
+        Example:
+            >>> from google.cloud.pubsub_v1 import publisher_client
+            >>> client = publisher_client.PublisherClient()
+            >>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
+            >>> data = b'The rain in Wales falls mainly on the snails.'
+            >>> response = client.publish(topic, data, username='guido')
+
+        Args:
+            topic (:class:~`pubsub_v1.types.Topic`): The topic to publish
+                messages to.
+            data (bytes): A bytestring representing the message body. This
+                must be a bytestring (a text string will raise TypeError).
+            attrs (Mapping[str, str]): A dictionary of attributes to be
+                sent as metadata. (These may be text strings or byte strings.)
+
+        Raises:
+            :exc:`TypeError`: If the ``data`` sent is not a bytestring, or
+                if the ``attrs`` are not either a ``str`` or ``bytes``.
 
         Returns:
-            class: A class duck-type compatible with :class:`threading.Thread`.
+            :class:~`pubsub_v1.publisher.future.Future`: An object conforming
+                to the ``concurrent.futures.Future`` interface.
""" - return self._thread_class - - @functools.wraps(Batch.publish) - def publish(self, data, **attrs): - return self.batch.publish(data, *attrs) + return self.batch(topic).publish(data, *attrs) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index 7fcc6dc59f2e..966cc87609d5 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -29,9 +29,11 @@ class Future(object): Args: batch (:class:~`pubsub_v1.batch.Batch`): The batch object that is committing this message. + client_id (str): The client ID of the message. """ - def __init__(self, batch): + def __init__(self, batch, client_id): self._batch = batch + self._client_id = client_id self._callbacks = queue.Queue() def cancel(self): @@ -61,7 +63,7 @@ def done(self): This still returns True in failure cases; checking `result` or `exception` is the canonical way to assess success or failure. """ - return self.batch.status in ('done', 'error') + return self.batch.status in ('success', 'error') def result(self, timeout=None): """Return the message ID, or raise an exception. @@ -78,8 +80,15 @@ def result(self, timeout=None): :class:~`Exception`: For undefined exceptions in the underlying call execution. """ - - def exception(self, timeout=None): + # Attempt to get the exception if there is one. + # If there is not one, then we know everything worked, and we can + # return an appropriate value. + err = self.exception(timeout=timeout) + if err is None: + return self.batch.get_message_id(self._client_id) + raise err + + def exception(self, timeout=None, _wait=1): """Return the exception raised by the call, if any. This blocks until the message has successfully been published, and @@ -95,6 +104,25 @@ def exception(self, timeout=None): Returns: :class:`Exception`: The exception raised by the call, if any. """ + # If the batch completed successfully, this should return None. 
+ if self.batch.status == 'success': + return None + + # If this batch had an error, this should return it. + if self.batch.status == 'error': + return self.batch._error + + # If the timeout has been exceeded, raise TimeoutError. + if timeout < 0: + raise TimeoutError('Timed out waiting for an exception.') + + # Wait a little while and try again. + time.sleep(_wait) + return self.exception( + timeout=timeout - _wait, + _wait=min(_wait * 2, 60), + ) + def add_done_callback(self, fn): """Attach the provided callable to the future. From 8f0832e78405fc79bb2ae867dffdf3a525a95878 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 25 May 2017 15:03:24 -0700 Subject: [PATCH 04/86] wip --- .../google/cloud/pubsub_v1/publisher/batch.py | 63 ++++++++++++++++--- .../cloud/pubsub_v1/publisher/future.py | 10 ++- 2 files changed, 61 insertions(+), 12 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index 821dfd5fbbfd..8839ae891f54 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -18,10 +18,10 @@ import queue import time +from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import future - -Message = collections.namedtuple('Message', ['data', 'attrs', '_client_id']) +QueueItem = collections.namedtuple('QueueItem', ['message', 'future']) class Batch(object): @@ -49,11 +49,14 @@ class Batch(object): settings for batch publishing. These should be considered immutable once the batch has been opened. """ - def __init__(self, client, settings): + def __init__(self, client, topic, settings): self._client = client + self._topic = topic self._settings = settings self._messages = queue.Queue() + self._futures = queue.Queue() self._status = 'accepting messages' + self._message_ids = {} # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. 
@@ -90,7 +93,49 @@ def commit(self): if self._client._batch is self: self._client._batch = None + # Update the status. + self._status = 'in-flight' + # Begin the request to publish these messages. + response = self._client.api.publish(self._topic, self.flush()) + + # FIXME (lukesneeringer): How do I check for errors on this? + self._status = 'success' + + # Iterate over the futures on the queue and return the response IDs. + # We are trusting that there is a 1:1 mapping, and raise an exception + # if not. + try: + for message_id in response.message_ids: + future_ = self._futures.get(block=False) + self._message_ids[future_] = message_id + future_._trigger()) + except queue.Empty: + raise ValueError('More message IDs came back than messages ' + 'were published.') + + # If the queue of futures is not empty, we did not get enough IDs + # back. + if self._futures.empty(): + raise ValueError('Fewer message IDs came back than messages ' + 'were published.') + + + def flush(self): + """Flush the messages off of this queue, one at a time. + + This method is called when the batch is committed. Calling it outside + of the context of committing will effectively remove messages + from the batch. + + Yields: + :class:~`pubsub_v1.types.PubSubMessage`: A Pub/Sub Message. + """ + try: + while True: + yield self._messages.get(block=False) + except queue.Empty: + raise StopIteration def monitor(self): """Commit this batch after sufficient time has elapsed. @@ -161,13 +206,11 @@ def publish(self, data, **attrs): raise TypeError('All attributes being published to Pub/Sub must ' 'be sent as text strings.') - # Add the message to the batch. - # - # We add an internal ID (note: a client-side ID, *not* the Pub/Sub - # ID) so we can track the message later. - _id = six.text_type(uuid.uuid4()) - self._messages.put(Message(data=data, attrs=attrs, client_id=_id)) + # Store the actual message in the batch's message queue. 
+ self._messages.put(PubSubMessage(data=data, attributes=attrs)) # Return a Future. That future needs to be aware of the status # of this batch. - return future.Future(self) + f = future.Future(self) + self._futures.put(f) + return f diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index 966cc87609d5..78e192a19c6a 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import queue +import uuid class Future(object): @@ -33,9 +34,12 @@ class Future(object): """ def __init__(self, batch, client_id): self._batch = batch - self._client_id = client_id + self._hash = hash(uuid.uuid4()) self._callbacks = queue.Queue() + def __hash__(self): + return self._hash + def cancel(self): """Publishes in Pub/Sub currently may not be canceled. @@ -123,7 +127,6 @@ def exception(self, timeout=None, _wait=1): _wait=min(_wait * 2, 60), ) - def add_done_callback(self, fn): """Attach the provided callable to the future. @@ -139,6 +142,9 @@ def _trigger(self): This method is called internally by the batch once the batch completes. + + Args: + message_id (str): The message ID, as a string. 
""" try: while True: From 02d7658794dde11098a0d95e6e9ba54aac14c375 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 30 May 2017 11:25:30 -0700 Subject: [PATCH 05/86] WIP --- pubsub/google/cloud/pubsub_v1/_gapic.py | 29 +++++++++++++++---- .../cloud/pubsub_v1/publisher/client.py | 4 ++- pubsub/setup.py | 5 +++- 3 files changed, 31 insertions(+), 7 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/_gapic.py b/pubsub/google/cloud/pubsub_v1/_gapic.py index 7379d91503b3..cb95557034ab 100644 --- a/pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/pubsub/google/cloud/pubsub_v1/_gapic.py @@ -14,6 +14,8 @@ from __future__ import absolute_import +from functools import wraps + def add_methods(SourceClass, blacklist=()): """Add wrapped versions of the `api` member's methods to the class. @@ -22,6 +24,25 @@ def add_methods(SourceClass, blacklist=()): Additionally, any methods explicitly defined on the wrapped class are not added. """ + def wrap(wrapped): + """Wrap a GAPIC method; preserve its name and docstring.""" + # If this is a static or class method, then we need to *not* + # send self as the first argument. + # + # Similarly, for instance methods, we need to send self.api rather + # than self, since that is where the actual methods were declared. + instance_method = hasattr(wrapped_fx, '__self__') + if issubclass(type(wrapped_fx.__self__), type): + instance_method = Flase + + # Okay, we have figured out what kind of method this is; send + # down the correct wrapper function. + if instance_method: + fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) + return functools.wraps(wrapped_fx)(fx) + fx = lambda self, *a, **kw: wrapped_fx(*a, **kw) + return functools.wraps(wrapped_fx)(fx) + def actual_decorator(cls): # Reflectively iterate over most of the methods on the source class # (the GAPIC) and make wrapped versions available on this client. 
@@ -35,15 +56,13 @@ def actual_decorator(cls): continue # Retrieve the attribute, and ignore it if it is not callable. - attr = getattr(self.api, name) + attr = getattr(cls._gapic_class, name) if not callable(attr): continue # Add a wrapper method to this object. - fx = lambda self, *a, **kw: getattr(self.api, name)(*a, **kw) - fx.__name__ = name - fx.__doc__ = attr.__doc__ - setattr(self, name, fx) + fx = wrap(getattr(cls._gapic_class, name)) + setattr(cls, name, fx) # Return the augmented class. return cls diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 51b4fdc5c430..1aa6d2177569 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -22,6 +22,7 @@ from google.cloud.gapic.pubsub.v1 import publisher_client +from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types @@ -47,6 +48,7 @@ class PublisherClient(object): :class:`~gapic.pubsub.v1.publisher_client.PublisherClient`. Generally, you should not need to set additional keyword arguments. """ + _gapic_class = publisher_client.PublisherClient def __init__(self, batching=(), thread_class=multiprocessing.Process, queue_class=multiprocessing.Queue, **kwargs): @@ -54,7 +56,7 @@ def __init__(self, batching=(), thread_class=multiprocessing.Process, # client. kwargs['lib_name'] = 'gccl' kwargs['lib_version'] = __VERSION__ - self.api = publisher_client.PublisherClient(*args, **kwargs) + self.api = self._gapic_class(**kwargs) self.batching = types.Batching(batching) # Set the thread class. 
diff --git a/pubsub/setup.py b/pubsub/setup.py index 4ff4c6b5cc66..1899896ece21 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -51,13 +51,16 @@ REQUIREMENTS = [ + 'google-gax >= 0.15.13, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', + 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'grpcio >= 1.0.2, < 2.0dev', 'psutil >= 5.2.2, < 6.0dev', ] setup( name='google-cloud-pubsub', - version='0.25.0', + version='0.26.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 503d11f8efa0f7f90a277925d2295e22f1e289e9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 30 May 2017 12:48:02 -0700 Subject: [PATCH 06/86] WIP, fixing bugs. --- pubsub/google/cloud/pubsub_v1/_gapic.py | 11 +++---- .../google/cloud/pubsub_v1/publisher/batch.py | 17 ++++++----- .../cloud/pubsub_v1/publisher/client.py | 29 +++++++++++++++---- .../cloud/pubsub_v1/publisher/future.py | 3 +- 4 files changed, 41 insertions(+), 19 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/_gapic.py b/pubsub/google/cloud/pubsub_v1/_gapic.py index cb95557034ab..145682860215 100644 --- a/pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/pubsub/google/cloud/pubsub_v1/_gapic.py @@ -14,7 +14,7 @@ from __future__ import absolute_import -from functools import wraps +import functools def add_methods(SourceClass, blacklist=()): @@ -24,16 +24,17 @@ def add_methods(SourceClass, blacklist=()): Additionally, any methods explicitly defined on the wrapped class are not added. """ - def wrap(wrapped): + def wrap(wrapped_fx): """Wrap a GAPIC method; preserve its name and docstring.""" # If this is a static or class method, then we need to *not* # send self as the first argument. # # Similarly, for instance methods, we need to send self.api rather # than self, since that is where the actual methods were declared. 
- instance_method = hasattr(wrapped_fx, '__self__') - if issubclass(type(wrapped_fx.__self__), type): - instance_method = Flase + instance_method = True + self = getattr(wrapped_fx, '__self__', None) + if issubclass(type(self), type): + instance_method = False # Okay, we have figured out what kind of method this is; send # down the correct wrapper function. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index 8839ae891f54..c208318c1d7a 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -15,9 +15,12 @@ from __future__ import absolute_import import collections +import copy import queue import time +import six + from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import future @@ -60,7 +63,8 @@ def __init__(self, client, topic, settings): # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. - self._client.thread_class(self.monitor) + self._process = self._client.thread_class(target=self.monitor) + self._process.start() @property def client(self): @@ -90,8 +94,7 @@ def commit(self): completion. """ # If this is the active batch on the cleint right now, remove it. - if self._client._batch is self: - self._client._batch = None + self._client.batch(self._topic, pop=self) # Update the status. self._status = 'in-flight' @@ -109,7 +112,7 @@ def commit(self): for message_id in response.message_ids: future_ = self._futures.get(block=False) self._message_ids[future_] = message_id - future_._trigger()) + future_._trigger() except queue.Empty: raise ValueError('More message IDs came back than messages ' 'were published.') @@ -129,7 +132,7 @@ def flush(self): from the batch. Yields: - :class:~`pubsub_v1.types.PubSubMessage`: A Pub/Sub Message. + :class:~`pubsub_v1.types.PubsubMessage`: A Pub/Sub Message. 
""" try: while True: @@ -197,7 +200,7 @@ def publish(self, data, **attrs): 'as a bytestring.') # Coerce all attributes to text strings. - for k, v in copy(attrs).items(): + for k, v in copy.copy(attrs).items(): if isinstance(data, six.text_type): continue if isinstance(data, six.binary_type): @@ -207,7 +210,7 @@ def publish(self, data, **attrs): 'be sent as text strings.') # Store the actual message in the batch's message queue. - self._messages.put(PubSubMessage(data=data, attributes=attrs)) + self._messages.put(types.PubsubMessage(data=data, attributes=attrs)) # Return a Future. That future needs to be aware of the status # of this batch. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 1aa6d2177569..456da3c9e148 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -24,6 +24,7 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher.batch import Batch __VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -75,21 +76,39 @@ def thread_class(self): """ return self._thread_class - def batch(self, topic): + def batch(self, topic, create=True, pop=None): """Return the current batch. - This will create a new batch if no batch currently exists. + This will create a new batch only if no batch currently exists. + + Args: + topic (str): A string representing the topic. + create (bool): Whether to create a new batch if no batch is + found. Defaults to True. + pop (:class:~`pubsub_v1.batch.Batch`): Pop the batch off + if it is found *and* is the batch that was sent. Defaults + to None (never pop). Returns: :class:~`pubsub_v1.batch.Batch` The batch object. """ - if topic not in self._batch: - self._batch[topic] = Batch( + # If there is no matching batch yet, then potentially create one + # and place it on the batches dictionary. 
+ if topic not in self._batches: + if not create: + return None + self._batches[topic] = Batch( client=self, settings=self.batching, topic=topic, ) - return self._batch[topic] + + # If we are supposed to remove the batch, pop it off and return it. + if pop and self._batches[topic] == pop: + return self._batches.pop(topic) + + # Simply return the appropriate batch. + return self._batches[topic] def publish(self, topic, data, **attrs): """Publish a single message. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index 78e192a19c6a..b940e3efa11f 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -30,9 +30,8 @@ class Future(object): Args: batch (:class:~`pubsub_v1.batch.Batch`): The batch object that is committing this message. - client_id (str): The client ID of the message. """ - def __init__(self, batch, client_id): + def __init__(self, batch): self._batch = batch self._hash = hash(uuid.uuid4()) self._callbacks = queue.Queue() From 3b21b9353fb023d9041e697ef815e729512d72d5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 30 May 2017 14:48:48 -0700 Subject: [PATCH 07/86] WIP --- pubsub/google/cloud/pubsub.py | 2 ++ pubsub/google/cloud/pubsub_v1/publisher/batch.py | 14 ++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub.py b/pubsub/google/cloud/pubsub.py index 5e77fbced96f..bf094f6cf03a 100644 --- a/pubsub/google/cloud/pubsub.py +++ b/pubsub/google/cloud/pubsub.py @@ -16,9 +16,11 @@ from google.cloud.pubsub_v1 import PublisherClient from google.cloud.pubsub_v1 import SubscriberClient +from google.cloud.pubsub_v1 import types __all__ = ( 'PublisherClient', 'SubscriberClient', + 'types', ) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index c208318c1d7a..6a82cbb73797 100644 --- 
a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -48,11 +48,16 @@ class Batch(object): client (:class:`google.cloud.pubsub_v1.PublisherClient`): The publisher client used to create this batch. Batch settings are inferred from this. + topic (str): The topic. The format for this is + ``projects/{project}/topics/{topic}``. settings (:class:`google.cloud.pubsub_v1.types.Batching`): The settings for batch publishing. These should be considered immutable once the batch has been opened. + autocommit (bool): Whether to autocommit the batch when the time + has elapsed. Defaults to True unless ``settings.max_latency`` is + inf. """ - def __init__(self, client, topic, settings): + def __init__(self, client, topic, settings, autocommit=True): self._client = client self._topic = topic self._settings = settings @@ -63,8 +68,9 @@ def __init__(self, client, topic, settings): # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. - self._process = self._client.thread_class(target=self.monitor) - self._process.start() + if autocommit and self._settings.max_latency < float('inf'): + self._process = self._client.thread_class(target=self.monitor) + self._process.start() @property def client(self): @@ -100,7 +106,7 @@ def commit(self): self._status = 'in-flight' # Begin the request to publish these messages. - response = self._client.api.publish(self._topic, self.flush()) + response = self._client.api.publish(self._topic, list(self.flush())) # FIXME (lukesneeringer): How do I check for errors on this? 
self._status = 'success' From 1e879a1e8df4fbcbe94c665f4fa9888edf2c9217 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 31 May 2017 13:30:26 -0700 Subject: [PATCH 08/86] wip --- .../google/cloud/pubsub_v1/publisher/batch.py | 121 ++++++++++-------- .../cloud/pubsub_v1/publisher/client.py | 28 ++-- .../cloud/pubsub_v1/publisher/exceptions.py | 16 +++ .../cloud/pubsub_v1/publisher/future.py | 29 ++--- pubsub/google/cloud/pubsub_v1/retry.py | 35 +++++ pubsub/google/cloud/pubsub_v1/types.py | 2 +- 6 files changed, 150 insertions(+), 81 deletions(-) create mode 100644 pubsub/google/cloud/pubsub_v1/publisher/exceptions.py create mode 100644 pubsub/google/cloud/pubsub_v1/retry.py diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index 6a82cbb73797..24a926e55a1e 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -22,10 +22,9 @@ import six from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import future -QueueItem = collections.namedtuple('QueueItem', ['message', 'future']) - class Batch(object): """A batch of messages. @@ -59,16 +58,24 @@ class Batch(object): """ def __init__(self, client, topic, settings, autocommit=True): self._client = client - self._topic = topic - self._settings = settings - self._messages = queue.Queue() - self._futures = queue.Queue() - self._status = 'accepting messages' - self._message_ids = {} + + # Create a namespace that is owned by the client manager; this + # is necessary to be able to have these values be communicable between + # processes. 
+ self._ = self.manager.Namespace() + self._.futures = self.manager.list() + self._.messages = self.manager.list() + self._.message_ids = self.manager.dict() + self._.settings = settings + self._.status = 'accepting messages' + self._.topic = topic + + # This is purely internal tracking. + self._process = None # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. - if autocommit and self._settings.max_latency < float('inf'): + if autocommit and self._.settings.max_latency < float('inf'): self._process = self._client.thread_class(target=self.monitor) self._process.start() @@ -82,6 +89,16 @@ def client(self): """ return self._client + @property + def manager(self): + """Return the client's manager. + + Returns: + :class:`multiprocessing.Manager`: The manager responsible for + handling shared memory objects. + """ + return self._client.manager + @property def status(self): """Return the status of this batch. @@ -90,7 +107,7 @@ def status(self): str: The status of this batch. All statuses are human-readable, all-lowercase strings. """ - return self._status + return self._.status def commit(self): """Actually publish all of the messages on the active batch. @@ -99,52 +116,46 @@ def commit(self): batch on the publisher, and then the batch is discarded upon completion. """ - # If this is the active batch on the cleint right now, remove it. - self._client.batch(self._topic, pop=self) - # Update the status. - self._status = 'in-flight' + self._.status = 'in-flight' # Begin the request to publish these messages. - response = self._client.api.publish(self._topic, list(self.flush())) + if len(self._.messages) == 0: + raise Exception('Empty queue') + response = self._client.api.publish(self._.topic, self._.messages) + + # Sanity check: If the number of message IDs is not equal to the + # number of futures I have, then something went wrong. 
+ if len(response.message_ids) != len(self._.futures): + raise exceptions.PublishError( + 'Some messages were not successfully published.', + ) # FIXME (lukesneeringer): How do I check for errors on this? - self._status = 'success' + self._.status = 'success' # Iterate over the futures on the queue and return the response IDs. # We are trusting that there is a 1:1 mapping, and raise an exception # if not. - try: - for message_id in response.message_ids: - future_ = self._futures.get(block=False) - self._message_ids[future_] = message_id - future_._trigger() - except queue.Empty: - raise ValueError('More message IDs came back than messages ' - 'were published.') - - # If the queue of futures is not empty, we did not get enough IDs - # back. - if self._futures.empty(): - raise ValueError('Fewer message IDs came back than messages ' - 'were published.') - - - def flush(self): - """Flush the messages off of this queue, one at a time. - - This method is called when the batch is committed. Calling it outside - of the context of committing will effectively remove messages - from the batch. - - Yields: - :class:~`pubsub_v1.types.PubsubMessage`: A Pub/Sub Message. + for mid, fut in zip(response.message_ids, self._.futures): + self._message_ids[fut] = mid + fut._trigger() + + def get_message_id(self, publish_future): + """Return the message ID corresponding to the given future. + + Args: + publish_future (:class:~`future.Future`): The future returned + from a ``publish`` call. + + Returns: + str: The message ID. + + Raises: + KeyError: If the future is not yet done or there is no message + ID corresponding to it. """ - try: - while True: - yield self._messages.get(block=False) - except queue.Empty: - raise StopIteration + return self._message_ids[publish_future] def monitor(self): """Commit this batch after sufficient time has elapsed. @@ -156,11 +167,11 @@ def monitor(self): # in a separate thread. # # Sleep for however long we should be waiting. 
- time.sleep(self._settings.max_latency) + time.sleep(self._.settings.max_latency) # If, in the intervening period, the batch started to be committed, # then no-op at this point. - if self._status != 'accepting messages': + if self._.status != 'accepting messages': return # Commit. @@ -216,10 +227,18 @@ def publish(self, data, **attrs): 'be sent as text strings.') # Store the actual message in the batch's message queue. - self._messages.put(types.PubsubMessage(data=data, attributes=attrs)) + self._.messages.append( + types.PubsubMessage(data=data, attributes=attrs), + ) # Return a Future. That future needs to be aware of the status # of this batch. - f = future.Future(self) - self._futures.put(f) + f = future.Future(self._) + self._.futures.append(f) return f + + +# Make a fake batch. This is used by the client to do single-op checks +# for batch existence. +FakeBatch = collections.namedtuple('FakeBatch', ['status']) +FAKE = FakeBatch(status='fake') diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 456da3c9e148..6d0598bacda4 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -25,6 +25,7 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher.batch import Batch +from google.cloud.pubsub_v1.publisher.batch import FAKE __VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -58,7 +59,11 @@ def __init__(self, batching=(), thread_class=multiprocessing.Process, kwargs['lib_name'] = 'gccl' kwargs['lib_version'] = __VERSION__ self.api = self._gapic_class(**kwargs) - self.batching = types.Batching(batching) + self.batching = types.Batching(*batching) + + # Set the manager, which is responsible for granting shared memory + # objects. + self._manager = multiprocessing.Manager() # Set the thread class. 
self._thread_class = thread_class @@ -67,6 +72,16 @@ def __init__(self, batching=(), thread_class=multiprocessing.Process, # messages. One batch exists for each topic. self._batches = {} + @property + def manager(self): + """Return the manager. + + Returns: + :class:`multiprocessing.Manager`: The manager responsible for + handling shared memory objects. + """ + return self._manager + @property def thread_class(self): """Return the thread class provided at instantiation. @@ -76,7 +91,7 @@ def thread_class(self): """ return self._thread_class - def batch(self, topic, create=True, pop=None): + def batch(self, topic, create=True): """Return the current batch. This will create a new batch only if no batch currently exists. @@ -85,16 +100,13 @@ def batch(self, topic, create=True, pop=None): topic (str): A string representing the topic. create (bool): Whether to create a new batch if no batch is found. Defaults to True. - pop (:class:~`pubsub_v1.batch.Batch`): Pop the batch off - if it is found *and* is the batch that was sent. Defaults - to None (never pop). Returns: :class:~`pubsub_v1.batch.Batch` The batch object. """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. - if topic not in self._batches: + if self._batches.get(topic, FAKE).status != 'accepting messages': if not create: return None self._batches[topic] = Batch( @@ -103,10 +115,6 @@ def batch(self, topic, create=True, pop=None): topic=topic, ) - # If we are supposed to remove the batch, pop it off and return it. - if pop and self._batches[topic] == pop: - return self._batches.pop(topic) - # Simply return the appropriate batch. return self._batches[topic] diff --git a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py new file mode 100644 index 000000000000..e37993b24035 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -0,0 +1,16 @@ +# Copyright 2017, Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class PublishError(RuntimeError): + pass diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index b940e3efa11f..d52b9c889e52 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -28,11 +28,11 @@ class Future(object): methods in this library. Args: - batch (:class:~`pubsub_v1.batch.Batch`): The batch object that - is committing this message. + batch (:class:`multiprocessing.Namespace`): Information about the + batch object that is committing this message. """ - def __init__(self, batch): - self._batch = batch + def __init__(self, batch_info): + self._batch_info = batch_info self._hash = hash(uuid.uuid4()) self._callbacks = queue.Queue() @@ -66,7 +66,7 @@ def done(self): This still returns True in failure cases; checking `result` or `exception` is the canonical way to assess success or failure. """ - return self.batch.status in ('success', 'error') + return self._batch_info.status in ('success', 'error') def result(self, timeout=None): """Return the message ID, or raise an exception. @@ -88,7 +88,7 @@ def result(self, timeout=None): # return an appropriate value. 
err = self.exception(timeout=timeout) if err is None: - return self.batch.get_message_id(self._client_id) + return self._batch_info.message_ids[self] raise err def exception(self, timeout=None, _wait=1): @@ -102,18 +102,18 @@ def exception(self, timeout=None, _wait=1): times out and raises TimeoutError. Raises: - :class:~`pubsub_v1.TimeoutError`: If the request times out. + :exc:`TimeoutError`: If the request times out. Returns: :class:`Exception`: The exception raised by the call, if any. """ # If the batch completed successfully, this should return None. - if self.batch.status == 'success': + if self.batch_info.status == 'success': return None # If this batch had an error, this should return it. - if self.batch.status == 'error': - return self.batch._error + if self.batch_info.status == 'error': + return self.batch_info.error # If the timeout has been exceeded, raise TimeoutError. if timeout < 0: @@ -151,12 +151,3 @@ def _trigger(self): callback(self) except queue.Empty: return None - - -class TimeoutError(object): - """Exception subclass for timeout-related errors. - - This exception is only returned by the :class:~`pubsub_v1.future.Future` - class. - """ - pass diff --git a/pubsub/google/cloud/pubsub_v1/retry.py b/pubsub/google/cloud/pubsub_v1/retry.py new file mode 100644 index 000000000000..3c098faa1e37 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/retry.py @@ -0,0 +1,35 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +def retry(func, delay=0, count=0, err=None, **kwargs): + """Attempt to retry a function after the provided delay. + + If there have been too many retries, raise an exception. + + Args: + func (callable): The function to retry. + delay (int): The period to delay before retrying; specified in seconds. + count (int): The number of previous retries that have occurred. + If this is >= 5, an exception will be raised. + **kwargs (dict): Other keyword arguments to pass to the function. + """ + # If there have been too many retries, simply raise the exception. + if count >= 5: + raise err + + # Sleep the given delay. + time.sleep(delay) + + # Try calling the method again. + return func(delay=delay, count=count, **kwargs) diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py index bb7c6fe2721c..b500d159a3a6 100644 --- a/pubsub/google/cloud/pubsub_v1/types.py +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -33,7 +33,7 @@ ) Batching.__new__.__defaults__ = ( 1024 * 1024 * 5, # max_bytes: 5 MB - 0.001, # max_latency: 1 millisecond + 0.25, # max_latency: 0.25 seconds 1000, # max_messages: 1,000 ) From 4bf05527681aac2883cb7f2c1b98852efa6647e9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 31 May 2017 14:15:54 -0700 Subject: [PATCH 09/86] wip --- .../google/cloud/pubsub_v1/publisher/batch.py | 18 +----------------- .../google/cloud/pubsub_v1/publisher/future.py | 12 ++++-------- 2 files changed, 5 insertions(+), 25 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index 24a926e55a1e..eacfb1a5009a 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -138,25 +138,9 @@ def commit(self): # We are trusting that there is a 1:1 mapping, and raise an exception # if not. 
for mid, fut in zip(response.message_ids, self._.futures): - self._message_ids[fut] = mid + self._.message_ids[fut] = mid fut._trigger() - def get_message_id(self, publish_future): - """Return the message ID corresponding to the given future. - - Args: - publish_future (:class:~`future.Future`): The future returned - from a ``publish`` call. - - Returns: - str: The message ID. - - Raises: - KeyError: If the future is not yet done or there is no message - ID corresponding to it. - """ - return self._message_ids[publish_future] - def monitor(self): """Commit this batch after sufficient time has elapsed. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index d52b9c889e52..feaa6b6bf902 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -34,7 +34,7 @@ class Future(object): def __init__(self, batch_info): self._batch_info = batch_info self._hash = hash(uuid.uuid4()) - self._callbacks = queue.Queue() + self._callbacks = [] def __hash__(self): return self._hash @@ -134,7 +134,7 @@ def add_done_callback(self, fn): """ if self.done(): fn(self) - self._callbacks.put(fn) + self._callbacks.append(fn) def _trigger(self): """Trigger all callbacks registered to this Future. @@ -145,9 +145,5 @@ def _trigger(self): Args: message_id (str): The message ID, as a string. 
""" - try: - while True: - callback = self._callbacks.get(block=False) - callback(self) - except queue.Empty: - return None + for callback in self._callbacks: + callback(self) From c6dc098edc1953e6fca84eb70f880da535383ce2 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 31 May 2017 15:28:17 -0700 Subject: [PATCH 10/86] wip --- .../google/cloud/pubsub_v1/publisher/batch.py | 15 +++++++----- .../cloud/pubsub_v1/publisher/client.py | 5 +++- .../cloud/pubsub_v1/publisher/future.py | 23 +++++++++---------- 3 files changed, 24 insertions(+), 19 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index eacfb1a5009a..21213d1592ec 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -124,6 +124,9 @@ def commit(self): raise Exception('Empty queue') response = self._client.api.publish(self._.topic, self._.messages) + # We got a response from Pub/Sub; denote that we are processing. + self._status = 'processing results' + # Sanity check: If the number of message IDs is not equal to the # number of futures I have, then something went wrong. if len(response.message_ids) != len(self._.futures): @@ -131,15 +134,15 @@ def commit(self): 'Some messages were not successfully published.', ) - # FIXME (lukesneeringer): How do I check for errors on this? - self._.status = 'success' - # Iterate over the futures on the queue and return the response IDs. # We are trusting that there is a 1:1 mapping, and raise an exception # if not. - for mid, fut in zip(response.message_ids, self._.futures): - self._.message_ids[fut] = mid - fut._trigger() + for message_id, fut in zip(response.message_ids, self._.futures): + fut._resolve(result=message_id) + + # We were successful; denote this. + self._.status = 'success' + def monitor(self): """Commit this batch after sufficient time has elapsed. 
diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 6d0598bacda4..d3a1ecb6264e 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -91,7 +91,7 @@ def thread_class(self): """ return self._thread_class - def batch(self, topic, create=True): + def batch(self, topic, create=True, autocommit=True): """Return the current batch. This will create a new batch only if no batch currently exists. @@ -100,6 +100,8 @@ def batch(self, topic, create=True): topic (str): A string representing the topic. create (bool): Whether to create a new batch if no batch is found. Defaults to True. + autocommit (bool): Whether to autocommit this batch. + This is primarily useful for debugging. Returns: :class:~`pubsub_v1.batch.Batch` The batch object. @@ -110,6 +112,7 @@ def batch(self, topic, create=True): if not create: return None self._batches[topic] = Batch( + autocommit=autocommit, client=self, settings=self.batching, topic=topic, diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index feaa6b6bf902..4af21baecd59 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -33,12 +33,10 @@ class Future(object): """ def __init__(self, batch_info): self._batch_info = batch_info - self._hash = hash(uuid.uuid4()) + self._result = None + self._error = None self._callbacks = [] - def __hash__(self): - return self._hash - def cancel(self): """Publishes in Pub/Sub currently may not be canceled. @@ -88,7 +86,7 @@ def result(self, timeout=None): # return an appropriate value. 
err = self.exception(timeout=timeout) if err is None: - return self._batch_info.message_ids[self] + return self._result raise err def exception(self, timeout=None, _wait=1): @@ -107,16 +105,16 @@ def exception(self, timeout=None, _wait=1): Returns: :class:`Exception`: The exception raised by the call, if any. """ + # If this batch had an error, this should return it. + if self._batch_info.status == 'error': + return self._error + # If the batch completed successfully, this should return None. - if self.batch_info.status == 'success': + if self._batch_info.status == 'success': return None - # If this batch had an error, this should return it. - if self.batch_info.status == 'error': - return self.batch_info.error - # If the timeout has been exceeded, raise TimeoutError. - if timeout < 0: + if timeout and timeout < 0: raise TimeoutError('Timed out waiting for an exception.') # Wait a little while and try again. @@ -136,7 +134,7 @@ def add_done_callback(self, fn): fn(self) self._callbacks.append(fn) - def _trigger(self): + def _resolve(self, result): """Trigger all callbacks registered to this Future. This method is called internally by the batch once the batch @@ -145,5 +143,6 @@ def _trigger(self): Args: message_id (str): The message ID, as a string. 
""" + self._result = result for callback in self._callbacks: callback(self) From 13821a77c6524a675c8555faeece7af524157654 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 31 May 2017 15:29:48 -0700 Subject: [PATCH 11/86] wip --- .../google/cloud/pubsub_v1/publisher/batch.py | 7 ++----- .../cloud/pubsub_v1/publisher/future.py | 21 ++++++++++--------- 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index 21213d1592ec..589d432b32d1 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -137,12 +137,9 @@ def commit(self): # Iterate over the futures on the queue and return the response IDs. # We are trusting that there is a 1:1 mapping, and raise an exception # if not. - for message_id, fut in zip(response.message_ids, self._.futures): - fut._resolve(result=message_id) - - # We were successful; denote this. self._.status = 'success' - + for message_id, fut in zip(response.message_ids, self._.futures): + fut._trigger(result=message_id) def monitor(self): """Commit this batch after sufficient time has elapsed. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index 4af21baecd59..a744b51ef1a1 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -33,10 +33,12 @@ class Future(object): """ def __init__(self, batch_info): self._batch_info = batch_info - self._result = None - self._error = None + self._hash = hash(uuid.uuid4()) self._callbacks = [] + def __hash__(self): + return self._hash + def cancel(self): """Publishes in Pub/Sub currently may not be canceled. @@ -86,7 +88,7 @@ def result(self, timeout=None): # return an appropriate value. 
err = self.exception(timeout=timeout) if err is None: - return self._result + return self._batch_info.message_ids[hash(self)] raise err def exception(self, timeout=None, _wait=1): @@ -105,14 +107,14 @@ def exception(self, timeout=None, _wait=1): Returns: :class:`Exception`: The exception raised by the call, if any. """ - # If this batch had an error, this should return it. - if self._batch_info.status == 'error': - return self._error - # If the batch completed successfully, this should return None. - if self._batch_info.status == 'success': + if self.batch_info.status == 'success': return None + # If this batch had an error, this should return it. + if self.batch_info.status == 'error': + return self.batch_info.error + # If the timeout has been exceeded, raise TimeoutError. if timeout and timeout < 0: raise TimeoutError('Timed out waiting for an exception.') @@ -134,7 +136,7 @@ def add_done_callback(self, fn): fn(self) self._callbacks.append(fn) - def _resolve(self, result): + def _trigger(self): """Trigger all callbacks registered to this Future. This method is called internally by the batch once the batch @@ -143,6 +145,5 @@ def _resolve(self, result): Args: message_id (str): The message ID, as a string. 
""" - self._result = result for callback in self._callbacks: callback(self) From c2d7af86460ba59daf3d8a0b1f025c1ce8b78459 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 1 Jun 2017 07:30:14 -0700 Subject: [PATCH 12/86] wip --- pubsub/google/cloud/pubsub_v1/publisher/batch.py | 5 ++++- pubsub/google/cloud/pubsub_v1/publisher/future.py | 7 ++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index 589d432b32d1..2e8744eb5289 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -124,6 +124,8 @@ def commit(self): raise Exception('Empty queue') response = self._client.api.publish(self._.topic, self._.messages) + # FIXME (lukesneeringer): Check for failures; retry. + # We got a response from Pub/Sub; denote that we are processing. self._status = 'processing results' @@ -139,7 +141,8 @@ def commit(self): # if not. self._.status = 'success' for message_id, fut in zip(response.message_ids, self._.futures): - fut._trigger(result=message_id) + self._.message_ids[hash(fut)] = message_id + fut._trigger() def monitor(self): """Commit this batch after sufficient time has elapsed. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index a744b51ef1a1..486e94700283 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -16,6 +16,7 @@ import queue import uuid +import time class Future(object): @@ -108,12 +109,12 @@ def exception(self, timeout=None, _wait=1): :class:`Exception`: The exception raised by the call, if any. """ # If the batch completed successfully, this should return None. - if self.batch_info.status == 'success': + if self._batch_info.status == 'success': return None # If this batch had an error, this should return it. 
- if self.batch_info.status == 'error': - return self.batch_info.error + if self._batch_info.status == 'error': + return self._batch_info.error # If the timeout has been exceeded, raise TimeoutError. if timeout and timeout < 0: From 98c1c6c1aca83ac84db40631abe583f69a2eaa60 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 1 Jun 2017 10:09:02 -0700 Subject: [PATCH 13/86] Add GAPIC/proto in the base branch to remove them from comparison. --- pubsub/google/cloud/gapic/__init__.py | 1 + pubsub/google/cloud/gapic/pubsub/__init__.py | 1 + .../google/cloud/gapic/pubsub/v1/__init__.py | 0 .../cloud/gapic/pubsub/v1/publisher_client.py | 565 ++++ .../pubsub/v1/publisher_client_config.json | 98 + .../gapic/pubsub/v1/subscriber_client.py | 1065 +++++++ .../pubsub/v1/subscriber_client_config.json | 129 + pubsub/google/cloud/proto/__init__.py | 1 + pubsub/google/cloud/proto/pubsub/__init__.py | 1 + .../google/cloud/proto/pubsub/v1/__init__.py | 1 + .../cloud/proto/pubsub/v1/pubsub_pb2.py | 2671 +++++++++++++++++ .../cloud/proto/pubsub/v1/pubsub_pb2_grpc.py | 461 +++ 12 files changed, 4994 insertions(+) create mode 100644 pubsub/google/cloud/gapic/__init__.py create mode 100644 pubsub/google/cloud/gapic/pubsub/__init__.py create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/__init__.py create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py create mode 100644 pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json create mode 100644 pubsub/google/cloud/proto/__init__.py create mode 100644 pubsub/google/cloud/proto/pubsub/__init__.py create mode 100644 pubsub/google/cloud/proto/pubsub/v1/__init__.py create mode 100644 pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py create mode 100644 pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py diff --git 
a/pubsub/google/cloud/gapic/__init__.py b/pubsub/google/cloud/gapic/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/gapic/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/gapic/pubsub/__init__.py b/pubsub/google/cloud/gapic/pubsub/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/__init__.py b/pubsub/google/cloud/gapic/pubsub/v1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py new file mode 100644 index 000000000000..c0466e6d444b --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py @@ -0,0 +1,565 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. 
A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Publisher API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 + +_PageDesc = google.gax.PageDescriptor + + +class PublisherClient(object): + """ + The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + _PAGE_DESCRIPTORS = { + 'list_topics': + _PageDesc('page_token', 'next_page_token', 'topics'), + 'list_topic_subscriptions': + _PageDesc('page_token', 'next_page_token', 'subscriptions') + } + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') + _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/topics/{topic}') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return cls._PROJECT_PATH_TEMPLATE.render({ + 'project': project, + }) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return cls._TOPIC_PATH_TEMPLATE.render({ + 'project': project, + 'topic': topic, + }) + + @classmethod + def match_project_from_project_name(cls, project_name): + """Parses the project from a project resource. 
+ + Args: + project_name (string): A fully-qualified path representing a project + resource. + + Returns: + A string representing the project. + """ + return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') + + @classmethod + def match_project_from_topic_name(cls, topic_name): + """Parses the project from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the project. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') + + @classmethod + def match_topic_from_topic_name(cls, topic_name): + """Parses the topic from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the topic. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. 
+ app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A PublisherClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + # Load the configuration defaults. 
+ default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'publisher_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.pubsub.v1.Publisher', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.iam_policy_stub = config.create_stub( + iam_policy_pb2.IAMPolicyStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + self.publisher_stub = config.create_stub( + pubsub_pb2.PublisherStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._create_topic = api_callable.create_api_call( + self.publisher_stub.CreateTopic, settings=defaults['create_topic']) + self._publish = api_callable.create_api_call( + self.publisher_stub.Publish, settings=defaults['publish']) + self._get_topic = api_callable.create_api_call( + self.publisher_stub.GetTopic, settings=defaults['get_topic']) + self._list_topics = api_callable.create_api_call( + self.publisher_stub.ListTopics, settings=defaults['list_topics']) + self._list_topic_subscriptions = api_callable.create_api_call( + self.publisher_stub.ListTopicSubscriptions, + settings=defaults['list_topic_subscriptions']) + self._delete_topic = api_callable.create_api_call( + self.publisher_stub.DeleteTopic, settings=defaults['delete_topic']) + self._set_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.SetIamPolicy, + settings=defaults['set_iam_policy']) + self._get_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.GetIamPolicy, + settings=defaults['get_iam_policy']) + self._test_iam_permissions = api_callable.create_api_call( + self.iam_policy_stub.TestIamPermissions, + settings=defaults['test_iam_permissions']) + + # Service calls + def 
create_topic(self, name, options=None): + """ + Creates the given topic with the given name. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> name = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.create_topic(name) + + Args: + name (string): The name of the topic. It must have the format + ``\"projects/{project}/topics/{topic}\"``. ``{topic}`` must start with a letter, + and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent + signs (``%``). It must be between 3 and 255 characters in length, and it + must not start with ``\"goog\"``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.Topic(name=name) + return self._create_topic(request, options) + + def publish(self, topic, messages, options=None): + """ + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> data = b'' + >>> messages_element = pubsub_pb2.PubsubMessage(data=data) + >>> messages = [messages_element] + >>> response = client.publish(topic, messages) + + Args: + topic (string): The messages in the request will be published on this topic. 
+ Format is ``projects/{project}/topics/{topic}``. + messages (list[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PubsubMessage`]): The messages to publish. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PublishResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) + return self._publish(request, options) + + def get_topic(self, topic, options=None): + """ + Gets the configuration of a topic. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.get_topic(topic) + + Args: + topic (string): The name of the topic to get. + Format is ``projects/{project}/topics/{topic}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.GetTopicRequest(topic=topic) + return self._get_topic(request, options) + + def list_topics(self, project, page_size=None, options=None): + """ + Lists matching topics. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = publisher_client.PublisherClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topics(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that topics belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListTopicsRequest( + project=project, page_size=page_size) + return self._list_topics(request, options) + + def list_topic_subscriptions(self, topic, page_size=None, options=None): + """ + Lists the name of the subscriptions for this topic. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topic_subscriptions(topic): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + topic (string): The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of string instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListTopicSubscriptionsRequest( + topic=topic, page_size=page_size) + return self._list_topic_subscriptions(request, options) + + def delete_topic(self, topic, options=None): + """ + Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. 
Existing subscriptions to this topic are + not deleted, but their ``topic`` field is set to ``_deleted-topic_``. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> client.delete_topic(topic) + + Args: + topic (string): Name of the topic to delete. + Format is ``projects/{project}/topics/{topic}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteTopicRequest(topic=topic) + self._delete_topic(request, options) + + def set_iam_policy(self, resource, policy, options=None): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.iam.v1 import policy_pb2 + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> policy = policy_pb2.Policy() + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (string): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. 
+ + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, options) + + def get_iam_policy(self, resource, options=None): + """ + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.get_iam_policy(resource) + + Args: + resource (string): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, options) + + def test_iam_permissions(self, resource, permissions, options=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> permissions = [] + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (string): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+ request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions(request, options) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json new file mode 100644 index 000000000000..7e8a723499e6 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json @@ -0,0 +1,98 @@ +{ + "interfaces": { + "google.pubsub.v1.Publisher": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "one_plus_delivery": [ + "CANCELLED", + "UNKNOWN", + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "ABORTED", + "INTERNAL", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Publish": { + "timeout_millis": 60000, + "retry_codes_name": "one_plus_delivery", + "retry_params_name": "messaging", + "bundling": { + "element_count_threshold": 10, + "element_count_limit": 1000, + "request_byte_threshold": 1024, + "request_byte_limit": 10485760, + "delay_threshold_millis": 10 + } + }, + "GetTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListTopics": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + 
"retry_params_name": "default" + }, + "ListTopicSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py new file mode 100644 index 000000000000..ab8233824595 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py @@ -0,0 +1,1065 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. 
A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Subscriber API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +from google.gax.utils import oneof +import google.gax + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 + +_PageDesc = google.gax.PageDescriptor + + +class SubscriberClient(object): + """ + The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the ``Pull`` method. + """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + _PAGE_DESCRIPTORS = { + 'list_subscriptions': + _PageDesc('page_token', 'next_page_token', 'subscriptions'), + 'list_snapshots': + _PageDesc('page_token', 'next_page_token', 'snapshots') + } + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') + _SNAPSHOT_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/snapshots/{snapshot}') + _SUBSCRIPTION_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/subscriptions/{subscription}') + _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/topics/{topic}') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return cls._PROJECT_PATH_TEMPLATE.render({ + 'project': project, + 
}) + + @classmethod + def snapshot_path(cls, project, snapshot): + """Returns a fully-qualified snapshot resource name string.""" + return cls._SNAPSHOT_PATH_TEMPLATE.render({ + 'project': project, + 'snapshot': snapshot, + }) + + @classmethod + def subscription_path(cls, project, subscription): + """Returns a fully-qualified subscription resource name string.""" + return cls._SUBSCRIPTION_PATH_TEMPLATE.render({ + 'project': + project, + 'subscription': + subscription, + }) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return cls._TOPIC_PATH_TEMPLATE.render({ + 'project': project, + 'topic': topic, + }) + + @classmethod + def match_project_from_project_name(cls, project_name): + """Parses the project from a project resource. + + Args: + project_name (string): A fully-qualified path representing a project + resource. + + Returns: + A string representing the project. + """ + return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') + + @classmethod + def match_project_from_snapshot_name(cls, snapshot_name): + """Parses the project from a snapshot resource. + + Args: + snapshot_name (string): A fully-qualified path representing a snapshot + resource. + + Returns: + A string representing the project. + """ + return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('project') + + @classmethod + def match_snapshot_from_snapshot_name(cls, snapshot_name): + """Parses the snapshot from a snapshot resource. + + Args: + snapshot_name (string): A fully-qualified path representing a snapshot + resource. + + Returns: + A string representing the snapshot. + """ + return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('snapshot') + + @classmethod + def match_project_from_subscription_name(cls, subscription_name): + """Parses the project from a subscription resource. + + Args: + subscription_name (string): A fully-qualified path representing a subscription + resource. 
+ + Returns: + A string representing the project. + """ + return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( + 'project') + + @classmethod + def match_subscription_from_subscription_name(cls, subscription_name): + """Parses the subscription from a subscription resource. + + Args: + subscription_name (string): A fully-qualified path representing a subscription + resource. + + Returns: + A string representing the subscription. + """ + return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( + 'subscription') + + @classmethod + def match_project_from_topic_name(cls, topic_name): + """Parses the project from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the project. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') + + @classmethod + def match_topic_from_topic_name(cls, topic_name): + """Parses the topic from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the topic. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. 
+ scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A SubscriberClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + # Load the configuration defaults. 
+ default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'subscriber_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.pubsub.v1.Subscriber', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.iam_policy_stub = config.create_stub( + iam_policy_pb2.IAMPolicyStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + self.subscriber_stub = config.create_stub( + pubsub_pb2.SubscriberStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._create_subscription = api_callable.create_api_call( + self.subscriber_stub.CreateSubscription, + settings=defaults['create_subscription']) + self._get_subscription = api_callable.create_api_call( + self.subscriber_stub.GetSubscription, + settings=defaults['get_subscription']) + self._update_subscription = api_callable.create_api_call( + self.subscriber_stub.UpdateSubscription, + settings=defaults['update_subscription']) + self._list_subscriptions = api_callable.create_api_call( + self.subscriber_stub.ListSubscriptions, + settings=defaults['list_subscriptions']) + self._delete_subscription = api_callable.create_api_call( + self.subscriber_stub.DeleteSubscription, + settings=defaults['delete_subscription']) + self._modify_ack_deadline = api_callable.create_api_call( + self.subscriber_stub.ModifyAckDeadline, + settings=defaults['modify_ack_deadline']) + self._acknowledge = api_callable.create_api_call( + self.subscriber_stub.Acknowledge, settings=defaults['acknowledge']) + self._pull = api_callable.create_api_call( + self.subscriber_stub.Pull, settings=defaults['pull']) + self._streaming_pull = api_callable.create_api_call( + self.subscriber_stub.StreamingPull, + 
settings=defaults['streaming_pull']) + self._modify_push_config = api_callable.create_api_call( + self.subscriber_stub.ModifyPushConfig, + settings=defaults['modify_push_config']) + self._list_snapshots = api_callable.create_api_call( + self.subscriber_stub.ListSnapshots, + settings=defaults['list_snapshots']) + self._create_snapshot = api_callable.create_api_call( + self.subscriber_stub.CreateSnapshot, + settings=defaults['create_snapshot']) + self._delete_snapshot = api_callable.create_api_call( + self.subscriber_stub.DeleteSnapshot, + settings=defaults['delete_snapshot']) + self._seek = api_callable.create_api_call( + self.subscriber_stub.Seek, settings=defaults['seek']) + self._set_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.SetIamPolicy, + settings=defaults['set_iam_policy']) + self._get_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.GetIamPolicy, + settings=defaults['get_iam_policy']) + self._test_iam_permissions = api_callable.create_api_call( + self.iam_policy_stub.TestIamPermissions, + settings=defaults['test_iam_permissions']) + + # Service calls + def create_subscription(self, + name, + topic, + push_config=None, + ack_deadline_seconds=None, + retain_acked_messages=None, + message_retention_duration=None, + options=None): + """ + Creates a subscription to a given topic. + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + `resource name format `_. + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.create_subscription(name, topic) + + Args: + name (string): The name of the subscription. It must have the format + ``\"projects/{project}/subscriptions/{subscription}\"``. ``{subscription}`` must + start with a letter, and contain only letters (``[A-Za-z]``), numbers + (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters + in length, and it must not start with ``\"goog\"``. + topic (string): The name of the topic from which this subscription is receiving messages. + Format is ``projects/{project}/topics/{topic}``. + The value of this field will be ``_deleted-topic_`` if the topic has been + deleted. + push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): If push delivery is used with this subscription, this field is + used to configure it. An empty ``pushConfig`` signifies that the subscriber + will pull and ack messages using API methods. + ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message + before the subscriber should acknowledge the message. After message + delivery but before the ack deadline expires and before the message is + acknowledged, it is an outstanding message and will not be delivered + again during that time (on a best-effort basis). + + For pull subscriptions, this value is used as the initial value for the ack + deadline. To override this value for a given message, call + ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using + pull. + The minimum custom deadline you can specify is 10 seconds. + The maximum custom deadline you can specify is 600 seconds (10 minutes). 
+ If this parameter is 0, a default value of 10 seconds is used. + + For push delivery, this value is also used to set the request timeout for + the call to the push endpoint. + + If the subscriber never acknowledges the message, the Pub/Sub + system will eventually redeliver the message. + retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then + messages are not expunged from the subscription's backlog, even if they are + acknowledged, until they fall out of the ``message_retention_duration`` + window. + message_retention_duration (:class:`google.protobuf.duration_pb2.Duration`): How long to retain unacknowledged messages in the subscription's backlog, + from the moment a message is published. + If ``retain_acked_messages`` is true, then this also configures the retention + of acknowledged messages, and thus configures how far back in time a ``Seek`` + can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.Subscription( + name=name, + topic=topic, + push_config=push_config, + ack_deadline_seconds=ack_deadline_seconds, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration) + return self._create_subscription(request, options) + + def get_subscription(self, subscription, options=None): + """ + Gets the configuration details of a subscription. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.get_subscription(subscription) + + Args: + subscription (string): The name of the subscription to get. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) + return self._get_subscription(request, options) + + def update_subscription(self, subscription, update_mask, options=None): + """ + Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> from google.protobuf import field_mask_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = pubsub_pb2.Subscription() + >>> update_mask = field_mask_pb2.FieldMask() + >>> response = client.update_subscription(subscription, update_mask) + + Args: + subscription (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription`): The updated subscription object. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Indicates which fields in the provided subscription to update. + Must be specified and non-empty. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. 
+ + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.UpdateSubscriptionRequest( + subscription=subscription, update_mask=update_mask) + return self._update_subscription(request, options) + + def list_subscriptions(self, project, page_size=None, options=None): + """ + Lists matching subscriptions. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = subscriber_client.SubscriberClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_subscriptions(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+ request = pubsub_pb2.ListSubscriptionsRequest( + project=project, page_size=page_size) + return self._list_subscriptions(request, options) + + def delete_subscription(self, subscription, options=None): + """ + Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to ``Pull`` after deletion will return + ``NOT_FOUND``. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> client.delete_subscription(subscription) + + Args: + subscription (string): The subscription to delete. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteSubscriptionRequest( + subscription=subscription) + self._delete_subscription(request, options) + + def modify_ack_deadline(self, + subscription, + ack_ids, + ack_deadline_seconds, + options=None): + """ + Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level ``ackDeadlineSeconds`` used for subsequent messages. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> ack_deadline_seconds = 0 + >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) + + Args: + subscription (string): The name of the subscription. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[string]): List of acknowledgment IDs. + ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to + the Pub/Sub system. For example, if the value is 10, the new + ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero may immediately make the message available for + another pull request. + The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 minutes). + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ModifyAckDeadlineRequest( + subscription=subscription, + ack_ids=ack_ids, + ack_deadline_seconds=ack_deadline_seconds) + self._modify_ack_deadline(request, options) + + def acknowledge(self, subscription, ack_ids, options=None): + """ + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> client.acknowledge(subscription, ack_ids) + + Args: + subscription (string): The subscription whose message is being acknowledged. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[string]): The acknowledgment ID for the messages being acknowledged that was returned + by the Pub/Sub system in the ``Pull`` response. Must not be empty. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.AcknowledgeRequest( + subscription=subscription, ack_ids=ack_ids) + self._acknowledge(request, options) + + def pull(self, + subscription, + max_messages, + return_immediately=None, + options=None): + """ + Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return ``UNAVAILABLE`` if + there are too many concurrent pull requests pending for the given + subscription. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> max_messages = 0 + >>> response = client.pull(subscription, max_messages) + + Args: + subscription (string): The subscription from which messages should be pulled. + Format is ``projects/{project}/subscriptions/{sub}``. + max_messages (int): The maximum number of messages returned for this request. The Pub/Sub + system may return fewer than the number specified. 
+ return_immediately (bool): If this field set to true, the system will respond immediately even if + it there are no messages available to return in the ``Pull`` response. + Otherwise, the system may wait (for a bounded amount of time) until at + least one message is available, rather than returning no messages. The + client may cancel the request if it does not wish to wait any longer for + the response. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PullResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.PullRequest( + subscription=subscription, + max_messages=max_messages, + return_immediately=return_immediately) + return self._pull(request, options) + + def streaming_pull(self, requests, options=None): + """ + (EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status ``OK`` to reassign + server-side resources, in which case, the client should re-establish the + stream. ``UNAVAILABLE`` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + + EXPERIMENTAL: This method interface might change in the future. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> stream_ack_deadline_seconds = 0 + >>> request = pubsub_pb2.StreamingPullRequest(subscription=subscription, stream_ack_deadline_seconds=stream_ack_deadline_seconds) + >>> requests = [request] + >>> for element in client.streaming_pull(requests): + >>> # process element + >>> pass + + Args: + requests (iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullRequest`]): The input objects. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullResponse`]. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + return self._streaming_pull(requests, options) + + def modify_push_config(self, subscription, push_config, options=None): + """ + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty ``PushConfig``) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the ``PushConfig``. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> push_config = pubsub_pb2.PushConfig() + >>> client.modify_push_config(subscription, push_config) + + Args: + subscription (string): The name of the subscription. 
+ Format is ``projects/{project}/subscriptions/{sub}``. + push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub system should + stop pushing messages from the given subscription and allow + messages to be pulled and acknowledged - effectively pausing + the subscription if ``Pull`` is not called. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ModifyPushConfigRequest( + subscription=subscription, push_config=push_config) + self._modify_push_config(request, options) + + def list_snapshots(self, project, page_size=None, options=None): + """ + Lists the existing snapshots. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = subscriber_client.SubscriberClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_snapshots(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_snapshots(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that snapshots belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. 
+ options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListSnapshotsRequest( + project=project, page_size=page_size) + return self._list_snapshots(request, options) + + def create_snapshot(self, name, subscription, options=None): + """ + Creates a snapshot from the requested subscription. + If the snapshot already exists, returns ``ALREADY_EXISTS``. + If the requested subscription doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + `resource name format `_. + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.create_snapshot(name, subscription) + + Args: + name (string): Optional user-provided name for this snapshot. + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription. + Note that for REST API requests, you must specify a name. + Format is ``projects/{project}/snapshots/{snap}``. 
+ subscription (string): The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: + (a) The existing backlog on the subscription. More precisely, this is + :: + + defined as the messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + `CreateSnapshot` request; as well as: + (b) Any messages published to the subscription's topic following the + :: + + successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.CreateSnapshotRequest( + name=name, subscription=subscription) + return self._create_snapshot(request, options) + + def delete_snapshot(self, snapshot, options=None): + """ + Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> client.delete_snapshot(snapshot) + + Args: + snapshot (string): The name of the snapshot to delete. + Format is ``projects/{project}/snapshots/{snap}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. 
+ :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + self._delete_snapshot(request, options) + + def seek(self, subscription, time=None, snapshot=None, options=None): + """ + Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.seek(subscription) + + Args: + subscription (string): The subscription to affect. + time (:class:`google.protobuf.timestamp_pb2.Timestamp`): The time to seek to. + Messages retained in the subscription that were published before this + time are marked as acknowledged, and messages retained in the + subscription that were published after this time are marked as + unacknowledged. Note that this operation affects only those messages + retained in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). For example, + if ``time`` corresponds to a point before the message retention + window (or to a point before the system's notion of the subscription + creation time), only retained messages will be marked as unacknowledged, + and already-expunged messages will not be restored. + snapshot (string): The snapshot to seek to. The snapshot's topic must be the same as that of + the provided subscription. + Format is ``projects/{project}/snapshots/{snap}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.SeekResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
+ """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + oneof.check_oneof( + time=time, + snapshot=snapshot, ) + + # Create the request object. + request = pubsub_pb2.SeekRequest( + subscription=subscription, time=time, snapshot=snapshot) + return self._seek(request, options) + + def set_iam_policy(self, resource, policy, options=None): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.iam.v1 import policy_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> policy = policy_pb2.Policy() + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (string): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, options) + + def get_iam_policy(self, resource, options=None): + """ + Gets the access control policy for a resource. 
+ Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.get_iam_policy(resource) + + Args: + resource (string): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, options) + + def test_iam_permissions(self, resource, permissions, options=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> permissions = [] + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (string): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. 
+ options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions(request, options) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json new file mode 100644 index 000000000000..4b31158fbac8 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json @@ -0,0 +1,129 @@ +{ + "interfaces": { + "google.pubsub.v1.Subscriber": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ], + "pull": [ + "CANCELLED", + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "INTERNAL", + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "UpdateSubscription": { + "timeout_millis": 60000, + 
"retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ModifyAckDeadline": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Acknowledge": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "messaging" + }, + "Pull": { + "timeout_millis": 60000, + "retry_codes_name": "pull", + "retry_params_name": "messaging" + }, + "StreamingPull": { + "timeout_millis": 60000, + "retry_codes_name": "pull", + "retry_params_name": "messaging" + }, + "ModifyPushConfig": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListSnapshots": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Seek": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/pubsub/google/cloud/proto/__init__.py b/pubsub/google/cloud/proto/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ 
b/pubsub/google/cloud/proto/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/proto/pubsub/__init__.py b/pubsub/google/cloud/proto/pubsub/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/proto/pubsub/v1/__init__.py b/pubsub/google/cloud/proto/pubsub/v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py new file mode 100644 index 000000000000..07919f8c5646 --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py @@ -0,0 +1,2671 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/proto/pubsub/v1/pubsub.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/pubsub/v1/pubsub.proto', + 
package='google.pubsub.v1', + syntax='proto3', + serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x15\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xda\x01\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 
\x01(\x0b\x32\x19.google.protobuf.Duration\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 
\x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"X\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 
\x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xe8\x0f\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rLis
tSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9b\x06\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') + , + 
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_TOPIC = _descriptor.Descriptor( + name='Topic', + full_name='google.pubsub.v1.Topic', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Topic.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=221, + serialized_end=242, +) + + +_PUBSUBMESSAGE_ATTRIBUTESENTRY = _descriptor.Descriptor( + name='AttributesEntry', + full_name='google.pubsub.v1.PubsubMessage.AttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=415, + serialized_end=464, +) + +_PUBSUBMESSAGE = _descriptor.Descriptor( + name='PubsubMessage', + full_name='google.pubsub.v1.PubsubMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='google.pubsub.v1.PubsubMessage.data', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='attributes', full_name='google.pubsub.v1.PubsubMessage.attributes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_id', full_name='google.pubsub.v1.PubsubMessage.message_id', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='publish_time', full_name='google.pubsub.v1.PubsubMessage.publish_time', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=245, + serialized_end=464, +) + + +_GETTOPICREQUEST = _descriptor.Descriptor( + 
name='GetTopicRequest', + full_name='google.pubsub.v1.GetTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.GetTopicRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=466, + serialized_end=498, +) + + +_PUBLISHREQUEST = _descriptor.Descriptor( + name='PublishRequest', + full_name='google.pubsub.v1.PublishRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.PublishRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='messages', full_name='google.pubsub.v1.PublishRequest.messages', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=500, + serialized_end=582, +) + + +_PUBLISHRESPONSE = _descriptor.Descriptor( + name='PublishResponse', + full_name='google.pubsub.v1.PublishResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message_ids', 
full_name='google.pubsub.v1.PublishResponse.message_ids', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=584, + serialized_end=622, +) + + +_LISTTOPICSREQUEST = _descriptor.Descriptor( + name='ListTopicsRequest', + full_name='google.pubsub.v1.ListTopicsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListTopicsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListTopicsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=624, + serialized_end=699, +) + + +_LISTTOPICSRESPONSE = _descriptor.Descriptor( + name='ListTopicsResponse', + 
full_name='google.pubsub.v1.ListTopicsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topics', full_name='google.pubsub.v1.ListTopicsResponse.topics', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=701, + serialized_end=787, +) + + +_LISTTOPICSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( + name='ListTopicSubscriptionsRequest', + full_name='google.pubsub.v1.ListTopicSubscriptionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', 
full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=789, + serialized_end=874, +) + + +_LISTTOPICSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( + name='ListTopicSubscriptionsResponse', + full_name='google.pubsub.v1.ListTopicSubscriptionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscriptions', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=876, + serialized_end=956, +) + + +_DELETETOPICREQUEST = _descriptor.Descriptor( + name='DeleteTopicRequest', + full_name='google.pubsub.v1.DeleteTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.DeleteTopicRequest.topic', index=0, + 
number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=958, + serialized_end=993, +) + + +_SUBSCRIPTION = _descriptor.Descriptor( + name='Subscription', + full_name='google.pubsub.v1.Subscription', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Subscription.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.Subscription.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='push_config', full_name='google.pubsub.v1.Subscription.push_config', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_deadline_seconds', full_name='google.pubsub.v1.Subscription.ack_deadline_seconds', index=3, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='retain_acked_messages', full_name='google.pubsub.v1.Subscription.retain_acked_messages', index=4, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_retention_duration', full_name='google.pubsub.v1.Subscription.message_retention_duration', index=5, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=996, + serialized_end=1214, +) + + +_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( + name='AttributesEntry', + full_name='google.pubsub.v1.PushConfig.AttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.PushConfig.AttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.PushConfig.AttributesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + 
oneofs=[ + ], + serialized_start=415, + serialized_end=464, +) + +_PUSHCONFIG = _descriptor.Descriptor( + name='PushConfig', + full_name='google.pubsub.v1.PushConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='push_endpoint', full_name='google.pubsub.v1.PushConfig.push_endpoint', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='attributes', full_name='google.pubsub.v1.PushConfig.attributes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1217, + serialized_end=1369, +) + + +_RECEIVEDMESSAGE = _descriptor.Descriptor( + name='ReceivedMessage', + full_name='google.pubsub.v1.ReceivedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ack_id', full_name='google.pubsub.v1.ReceivedMessage.ack_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message', full_name='google.pubsub.v1.ReceivedMessage.message', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1371, + serialized_end=1454, +) + + +_GETSUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='GetSubscriptionRequest', + full_name='google.pubsub.v1.GetSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.GetSubscriptionRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1456, + serialized_end=1502, +) + + +_UPDATESUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='UpdateSubscriptionRequest', + full_name='google.pubsub.v1.UpdateSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.UpdateSubscriptionRequest.subscription', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateSubscriptionRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, 
+ syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1505, + serialized_end=1635, +) + + +_LISTSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( + name='ListSubscriptionsRequest', + full_name='google.pubsub.v1.ListSubscriptionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListSubscriptionsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1637, + serialized_end=1719, +) + + +_LISTSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( + name='ListSubscriptionsResponse', + full_name='google.pubsub.v1.ListSubscriptionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscriptions', full_name='google.pubsub.v1.ListSubscriptionsResponse.subscriptions', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, 
default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListSubscriptionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1721, + serialized_end=1828, +) + + +_DELETESUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='DeleteSubscriptionRequest', + full_name='google.pubsub.v1.DeleteSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.DeleteSubscriptionRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1830, + serialized_end=1879, +) + + +_MODIFYPUSHCONFIGREQUEST = _descriptor.Descriptor( + name='ModifyPushConfigRequest', + full_name='google.pubsub.v1.ModifyPushConfigRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.ModifyPushConfigRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='push_config', full_name='google.pubsub.v1.ModifyPushConfigRequest.push_config', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1881, + serialized_end=1979, +) + + +_PULLREQUEST = _descriptor.Descriptor( + name='PullRequest', + full_name='google.pubsub.v1.PullRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.PullRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_immediately', full_name='google.pubsub.v1.PullRequest.return_immediately', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_messages', full_name='google.pubsub.v1.PullRequest.max_messages', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=1981, + serialized_end=2066, +) + + +_PULLRESPONSE = _descriptor.Descriptor( + name='PullResponse', + full_name='google.pubsub.v1.PullResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='received_messages', full_name='google.pubsub.v1.PullResponse.received_messages', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2068, + serialized_end=2144, +) + + +_MODIFYACKDEADLINEREQUEST = _descriptor.Descriptor( + name='ModifyAckDeadlineRequest', + full_name='google.pubsub.v1.ModifyAckDeadlineRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids', index=1, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_deadline_seconds', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2146, + serialized_end=2241, +) + + +_ACKNOWLEDGEREQUEST = _descriptor.Descriptor( + name='AcknowledgeRequest', + full_name='google.pubsub.v1.AcknowledgeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.AcknowledgeRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.AcknowledgeRequest.ack_ids', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2243, + serialized_end=2302, +) + + +_STREAMINGPULLREQUEST = _descriptor.Descriptor( + name='StreamingPullRequest', + full_name='google.pubsub.v1.StreamingPullRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.StreamingPullRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', 
full_name='google.pubsub.v1.StreamingPullRequest.ack_ids', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='modify_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds', index=2, + number=3, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='modify_deadline_ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids', index=3, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream_ack_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2305, + serialized_end=2469, +) + + +_STREAMINGPULLRESPONSE = _descriptor.Descriptor( + name='StreamingPullResponse', + full_name='google.pubsub.v1.StreamingPullResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='received_messages', full_name='google.pubsub.v1.StreamingPullResponse.received_messages', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2471, + serialized_end=2556, +) + + +_CREATESNAPSHOTREQUEST = _descriptor.Descriptor( + name='CreateSnapshotRequest', + full_name='google.pubsub.v1.CreateSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.CreateSnapshotRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.CreateSnapshotRequest.subscription', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2558, + serialized_end=2617, +) + + +_SNAPSHOT = _descriptor.Descriptor( + name='Snapshot', + full_name='google.pubsub.v1.Snapshot', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Snapshot.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='topic', 
full_name='google.pubsub.v1.Snapshot.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='expire_time', full_name='google.pubsub.v1.Snapshot.expire_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2619, + serialized_end=2707, +) + + +_LISTSNAPSHOTSREQUEST = _descriptor.Descriptor( + name='ListSnapshotsRequest', + full_name='google.pubsub.v1.ListSnapshotsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListSnapshotsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListSnapshotsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListSnapshotsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2709, + serialized_end=2787, +) + + +_LISTSNAPSHOTSRESPONSE = _descriptor.Descriptor( + name='ListSnapshotsResponse', + full_name='google.pubsub.v1.ListSnapshotsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshots', full_name='google.pubsub.v1.ListSnapshotsResponse.snapshots', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListSnapshotsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2789, + serialized_end=2884, +) + + +_DELETESNAPSHOTREQUEST = _descriptor.Descriptor( + name='DeleteSnapshotRequest', + full_name='google.pubsub.v1.DeleteSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.DeleteSnapshotRequest.snapshot', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ 
+ ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2886, + serialized_end=2927, +) + + +_SEEKREQUEST = _descriptor.Descriptor( + name='SeekRequest', + full_name='google.pubsub.v1.SeekRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.SeekRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='time', full_name='google.pubsub.v1.SeekRequest.time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.SeekRequest.snapshot', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='target', full_name='google.pubsub.v1.SeekRequest.target', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2929, + serialized_end=3038, +) + + +_SEEKRESPONSE = _descriptor.Descriptor( + name='SeekResponse', + full_name='google.pubsub.v1.SeekResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + 
extension_ranges=[], + oneofs=[ + ], + serialized_start=3040, + serialized_end=3054, +) + +_PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE +_PUBSUBMESSAGE.fields_by_name['attributes'].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY +_PUBSUBMESSAGE.fields_by_name['publish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_PUBLISHREQUEST.fields_by_name['messages'].message_type = _PUBSUBMESSAGE +_LISTTOPICSRESPONSE.fields_by_name['topics'].message_type = _TOPIC +_SUBSCRIPTION.fields_by_name['push_config'].message_type = _PUSHCONFIG +_SUBSCRIPTION.fields_by_name['message_retention_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG +_PUSHCONFIG.fields_by_name['attributes'].message_type = _PUSHCONFIG_ATTRIBUTESENTRY +_RECEIVEDMESSAGE.fields_by_name['message'].message_type = _PUBSUBMESSAGE +_UPDATESUBSCRIPTIONREQUEST.fields_by_name['subscription'].message_type = _SUBSCRIPTION +_UPDATESUBSCRIPTIONREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTSUBSCRIPTIONSRESPONSE.fields_by_name['subscriptions'].message_type = _SUBSCRIPTION +_MODIFYPUSHCONFIGREQUEST.fields_by_name['push_config'].message_type = _PUSHCONFIG +_PULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_STREAMINGPULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_SNAPSHOT.fields_by_name['expire_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LISTSNAPSHOTSRESPONSE.fields_by_name['snapshots'].message_type = _SNAPSHOT +_SEEKREQUEST.fields_by_name['time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SEEKREQUEST.oneofs_by_name['target'].fields.append( + _SEEKREQUEST.fields_by_name['time']) +_SEEKREQUEST.fields_by_name['time'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +_SEEKREQUEST.oneofs_by_name['target'].fields.append( 
+ _SEEKREQUEST.fields_by_name['snapshot']) +_SEEKREQUEST.fields_by_name['snapshot'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +DESCRIPTOR.message_types_by_name['Topic'] = _TOPIC +DESCRIPTOR.message_types_by_name['PubsubMessage'] = _PUBSUBMESSAGE +DESCRIPTOR.message_types_by_name['GetTopicRequest'] = _GETTOPICREQUEST +DESCRIPTOR.message_types_by_name['PublishRequest'] = _PUBLISHREQUEST +DESCRIPTOR.message_types_by_name['PublishResponse'] = _PUBLISHRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicsRequest'] = _LISTTOPICSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicsResponse'] = _LISTTOPICSRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsRequest'] = _LISTTOPICSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsResponse'] = _LISTTOPICSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteTopicRequest'] = _DELETETOPICREQUEST +DESCRIPTOR.message_types_by_name['Subscription'] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name['PushConfig'] = _PUSHCONFIG +DESCRIPTOR.message_types_by_name['ReceivedMessage'] = _RECEIVEDMESSAGE +DESCRIPTOR.message_types_by_name['GetSubscriptionRequest'] = _GETSUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['UpdateSubscriptionRequest'] = _UPDATESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['ListSubscriptionsRequest'] = _LISTSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name['ListSubscriptionsResponse'] = _LISTSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteSubscriptionRequest'] = _DELETESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['ModifyPushConfigRequest'] = _MODIFYPUSHCONFIGREQUEST +DESCRIPTOR.message_types_by_name['PullRequest'] = _PULLREQUEST +DESCRIPTOR.message_types_by_name['PullResponse'] = _PULLRESPONSE +DESCRIPTOR.message_types_by_name['ModifyAckDeadlineRequest'] = _MODIFYACKDEADLINEREQUEST +DESCRIPTOR.message_types_by_name['AcknowledgeRequest'] = _ACKNOWLEDGEREQUEST 
+DESCRIPTOR.message_types_by_name['StreamingPullRequest'] = _STREAMINGPULLREQUEST +DESCRIPTOR.message_types_by_name['StreamingPullResponse'] = _STREAMINGPULLRESPONSE +DESCRIPTOR.message_types_by_name['CreateSnapshotRequest'] = _CREATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT +DESCRIPTOR.message_types_by_name['ListSnapshotsRequest'] = _LISTSNAPSHOTSREQUEST +DESCRIPTOR.message_types_by_name['ListSnapshotsResponse'] = _LISTSNAPSHOTSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteSnapshotRequest'] = _DELETESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['SeekRequest'] = _SEEKREQUEST +DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE + +Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( + DESCRIPTOR = _TOPIC, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) + )) +_sym_db.RegisterMessage(Topic) + +PubsubMessage = _reflection.GeneratedProtocolMessageType('PubsubMessage', (_message.Message,), dict( + + AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( + DESCRIPTOR = _PUBSUBMESSAGE_ATTRIBUTESENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) + )) + , + DESCRIPTOR = _PUBSUBMESSAGE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) + )) +_sym_db.RegisterMessage(PubsubMessage) +_sym_db.RegisterMessage(PubsubMessage.AttributesEntry) + +GetTopicRequest = _reflection.GeneratedProtocolMessageType('GetTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _GETTOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) + )) +_sym_db.RegisterMessage(GetTopicRequest) + +PublishRequest = 
_reflection.GeneratedProtocolMessageType('PublishRequest', (_message.Message,), dict( + DESCRIPTOR = _PUBLISHREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) + )) +_sym_db.RegisterMessage(PublishRequest) + +PublishResponse = _reflection.GeneratedProtocolMessageType('PublishResponse', (_message.Message,), dict( + DESCRIPTOR = _PUBLISHRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) + )) +_sym_db.RegisterMessage(PublishResponse) + +ListTopicsRequest = _reflection.GeneratedProtocolMessageType('ListTopicsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) + )) +_sym_db.RegisterMessage(ListTopicsRequest) + +ListTopicsResponse = _reflection.GeneratedProtocolMessageType('ListTopicsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) + )) +_sym_db.RegisterMessage(ListTopicsResponse) + +ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) + )) +_sym_db.RegisterMessage(ListTopicSubscriptionsRequest) + +ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # 
@@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) + )) +_sym_db.RegisterMessage(ListTopicSubscriptionsResponse) + +DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETETOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) + )) +_sym_db.RegisterMessage(DeleteTopicRequest) + +Subscription = _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIPTION, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) + )) +_sym_db.RegisterMessage(Subscription) + +PushConfig = _reflection.GeneratedProtocolMessageType('PushConfig', (_message.Message,), dict( + + AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( + DESCRIPTOR = _PUSHCONFIG_ATTRIBUTESENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) + )) + , + DESCRIPTOR = _PUSHCONFIG, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) + )) +_sym_db.RegisterMessage(PushConfig) +_sym_db.RegisterMessage(PushConfig.AttributesEntry) + +ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), dict( + DESCRIPTOR = _RECEIVEDMESSAGE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) + )) +_sym_db.RegisterMessage(ReceivedMessage) + +GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType('GetSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _GETSUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # 
@@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) + )) +_sym_db.RegisterMessage(GetSubscriptionRequest) + +UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType('UpdateSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) + )) +_sym_db.RegisterMessage(UpdateSubscriptionRequest) + +ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListSubscriptionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSUBSCRIPTIONSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) + )) +_sym_db.RegisterMessage(ListSubscriptionsRequest) + +ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListSubscriptionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSUBSCRIPTIONSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) + )) +_sym_db.RegisterMessage(ListSubscriptionsResponse) + +DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType('DeleteSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETESUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) + )) +_sym_db.RegisterMessage(DeleteSubscriptionRequest) + +ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyPushConfigRequest', (_message.Message,), dict( + DESCRIPTOR = _MODIFYPUSHCONFIGREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) + )) +_sym_db.RegisterMessage(ModifyPushConfigRequest) + +PullRequest = 
_reflection.GeneratedProtocolMessageType('PullRequest', (_message.Message,), dict( + DESCRIPTOR = _PULLREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) + )) +_sym_db.RegisterMessage(PullRequest) + +PullResponse = _reflection.GeneratedProtocolMessageType('PullResponse', (_message.Message,), dict( + DESCRIPTOR = _PULLRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) + )) +_sym_db.RegisterMessage(PullResponse) + +ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType('ModifyAckDeadlineRequest', (_message.Message,), dict( + DESCRIPTOR = _MODIFYACKDEADLINEREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) + )) +_sym_db.RegisterMessage(ModifyAckDeadlineRequest) + +AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), dict( + DESCRIPTOR = _ACKNOWLEDGEREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) + )) +_sym_db.RegisterMessage(AcknowledgeRequest) + +StreamingPullRequest = _reflection.GeneratedProtocolMessageType('StreamingPullRequest', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGPULLREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) + )) +_sym_db.RegisterMessage(StreamingPullRequest) + +StreamingPullResponse = _reflection.GeneratedProtocolMessageType('StreamingPullResponse', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGPULLRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) + )) +_sym_db.RegisterMessage(StreamingPullResponse) + 
+CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) + )) +_sym_db.RegisterMessage(CreateSnapshotRequest) + +Snapshot = _reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict( + DESCRIPTOR = _SNAPSHOT, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) + )) +_sym_db.RegisterMessage(Snapshot) + +ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSNAPSHOTSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) + )) +_sym_db.RegisterMessage(ListSnapshotsRequest) + +ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListSnapshotsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSNAPSHOTSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) + )) +_sym_db.RegisterMessage(ListSnapshotsResponse) + +DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType('DeleteSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) + )) +_sym_db.RegisterMessage(DeleteSnapshotRequest) + +SeekRequest = _reflection.GeneratedProtocolMessageType('SeekRequest', (_message.Message,), dict( + DESCRIPTOR = _SEEKREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) + )) +_sym_db.RegisterMessage(SeekRequest) + +SeekResponse = 
_reflection.GeneratedProtocolMessageType('SeekResponse', (_message.Message,), dict( + DESCRIPTOR = _SEEKRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) + )) +_sym_db.RegisterMessage(SeekResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1')) +_PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True +_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_PUSHCONFIG_ATTRIBUTESENTRY.has_options = True +_PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class SubscriberStub(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=Subscription.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.GetSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=GetSubscriptionRequest.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.UpdateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=UpdateSubscriptionRequest.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.ListSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=ListSubscriptionsRequest.SerializeToString, + response_deserializer=ListSubscriptionsResponse.FromString, + ) + self.DeleteSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyAckDeadline = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Acknowledge = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Pull = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=PullRequest.SerializeToString, + response_deserializer=PullResponse.FromString, + ) + self.StreamingPull = channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=StreamingPullRequest.SerializeToString, + response_deserializer=StreamingPullResponse.FromString, + ) + 
self.ModifyPushConfig = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListSnapshots = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=ListSnapshotsRequest.SerializeToString, + response_deserializer=ListSnapshotsResponse.FromString, + ) + self.CreateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=CreateSnapshotRequest.SerializeToString, + response_deserializer=Snapshot.FromString, + ) + self.DeleteSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + request_serializer=DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Seek = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=SeekRequest.SerializeToString, + response_deserializer=SeekResponse.FromString, + ) + + + class SubscriberServicer(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. 
Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. 
After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_SubscriberServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=Subscription.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'GetSubscription': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=GetSubscriptionRequest.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=UpdateSubscriptionRequest.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=ListSubscriptionsRequest.FromString, + response_serializer=ListSubscriptionsResponse.SerializeToString, + ), + 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + request_deserializer=ModifyAckDeadlineRequest.FromString, + 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Acknowledge': grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + request_deserializer=AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Pull': grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=PullRequest.FromString, + response_serializer=PullResponse.SerializeToString, + ), + 'StreamingPull': grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=StreamingPullRequest.FromString, + response_serializer=StreamingPullResponse.SerializeToString, + ), + 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + request_deserializer=ListSnapshotsRequest.FromString, + response_serializer=ListSnapshotsResponse.SerializeToString, + ), + 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=CreateSnapshotRequest.FromString, + response_serializer=Snapshot.SerializeToString, + ), + 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Seek': grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=SeekRequest.FromString, + response_serializer=SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Subscriber', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class PublisherStub(object): + """The service that an application uses to manipulate topics, and to send + 
messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=Topic.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=PublishRequest.SerializeToString, + response_deserializer=PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=GetTopicRequest.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=ListTopicsRequest.SerializeToString, + response_deserializer=ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=ListTopicSubscriptionsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class PublisherServicer(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. 
The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=Topic.FromString, + response_serializer=Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=PublishRequest.FromString, + response_serializer=PublishResponse.SerializeToString, + ), + 'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=GetTopicRequest.FromString, + response_serializer=Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=ListTopicsRequest.FromString, + response_serializer=ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=ListTopicSubscriptionsRequest.FromString, + response_serializer=ListTopicSubscriptionsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + request_deserializer=DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaSubscriberServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. 
`UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaSubscriberStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + def CreateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + raise NotImplementedError() + CreateSubscription.future = None + def GetSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the configuration details of a subscription. + """ + raise NotImplementedError() + GetSubscription.future = None + def UpdateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. 
+ """ + raise NotImplementedError() + UpdateSubscription.future = None + def ListSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists matching subscriptions. + """ + raise NotImplementedError() + ListSubscriptions.future = None + def DeleteSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + raise NotImplementedError() + DeleteSubscription.future = None + def ModifyAckDeadline(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + raise NotImplementedError() + ModifyAckDeadline.future = None + def Acknowledge(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + raise NotImplementedError() + Acknowledge.future = None + def Pull(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Pulls messages from the server. 
Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + raise NotImplementedError() + Pull.future = None + def StreamingPull(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + raise NotImplementedError() + def ModifyPushConfig(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + raise NotImplementedError() + ModifyPushConfig.future = None + def ListSnapshots(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the existing snapshots. 
+ """ + raise NotImplementedError() + ListSnapshots.future = None + def CreateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + raise NotImplementedError() + CreateSnapshot.future = None + def DeleteSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + raise NotImplementedError() + DeleteSnapshot.future = None + def Seek(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + raise NotImplementedError() + Seek.future = None + + + def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.FromString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): ModifyPushConfigRequest.FromString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.FromString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.FromString, + } + response_serializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): 
ListSnapshotsResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.SerializeToString, + } + method_implementations = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): face_utilities.unary_unary_inline(servicer.Acknowledge), + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): face_utilities.unary_unary_inline(servicer.CreateSnapshot), + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): face_utilities.unary_unary_inline(servicer.CreateSubscription), + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): face_utilities.unary_unary_inline(servicer.DeleteSnapshot), + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): face_utilities.unary_unary_inline(servicer.DeleteSubscription), + ('google.pubsub.v1.Subscriber', 'GetSubscription'): face_utilities.unary_unary_inline(servicer.GetSubscription), + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): face_utilities.unary_unary_inline(servicer.ListSnapshots), + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): face_utilities.unary_unary_inline(servicer.ListSubscriptions), + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): face_utilities.unary_unary_inline(servicer.ModifyAckDeadline), + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): face_utilities.unary_unary_inline(servicer.ModifyPushConfig), + ('google.pubsub.v1.Subscriber', 'Pull'): face_utilities.unary_unary_inline(servicer.Pull), + 
('google.pubsub.v1.Subscriber', 'Seek'): face_utilities.unary_unary_inline(servicer.Seek), + ('google.pubsub.v1.Subscriber', 'StreamingPull'): face_utilities.stream_stream_inline(servicer.StreamingPull), + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): face_utilities.unary_unary_inline(servicer.UpdateSubscription), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): 
ModifyPushConfigRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.SerializeToString, + } + response_deserializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsResponse.FromString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.FromString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.FromString, + } + cardinalities = { + 'Acknowledge': cardinality.Cardinality.UNARY_UNARY, + 'CreateSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'CreateSubscription': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSubscription': cardinality.Cardinality.UNARY_UNARY, + 
'GetSubscription': cardinality.Cardinality.UNARY_UNARY, + 'ListSnapshots': cardinality.Cardinality.UNARY_UNARY, + 'ListSubscriptions': cardinality.Cardinality.UNARY_UNARY, + 'ModifyAckDeadline': cardinality.Cardinality.UNARY_UNARY, + 'ModifyPushConfig': cardinality.Cardinality.UNARY_UNARY, + 'Pull': cardinality.Cardinality.UNARY_UNARY, + 'Seek': cardinality.Cardinality.UNARY_UNARY, + 'StreamingPull': cardinality.Cardinality.STREAM_STREAM, + 'UpdateSubscription': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Subscriber', cardinalities, options=stub_options) + + + class BetaPublisherServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTopics(self, request, context): + """Lists matching topics. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaPublisherStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + def CreateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates the given topic with the given name. + """ + raise NotImplementedError() + CreateTopic.future = None + def Publish(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + raise NotImplementedError() + Publish.future = None + def GetTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the configuration of a topic. 
+ """ + raise NotImplementedError() + GetTopic.future = None + def ListTopics(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists matching topics. + """ + raise NotImplementedError() + ListTopics.future = None + def ListTopicSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the name of the subscriptions for this topic. + """ + raise NotImplementedError() + ListTopicSubscriptions.future = None + def DeleteTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + raise NotImplementedError() + DeleteTopic.future = None + + + def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.FromString, + ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.FromString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.FromString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.FromString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.FromString, + } + response_serializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.SerializeToString, + } + method_implementations = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): face_utilities.unary_unary_inline(servicer.CreateTopic), + ('google.pubsub.v1.Publisher', 'DeleteTopic'): face_utilities.unary_unary_inline(servicer.DeleteTopic), + ('google.pubsub.v1.Publisher', 'GetTopic'): face_utilities.unary_unary_inline(servicer.GetTopic), + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): face_utilities.unary_unary_inline(servicer.ListTopicSubscriptions), + ('google.pubsub.v1.Publisher', 'ListTopics'): face_utilities.unary_unary_inline(servicer.ListTopics), + ('google.pubsub.v1.Publisher', 'Publish'): face_utilities.unary_unary_inline(servicer.Publish), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, 
thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.SerializeToString, + } + response_deserializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.FromString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.FromString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.FromString, + } + cardinalities = { + 'CreateTopic': cardinality.Cardinality.UNARY_UNARY, + 'DeleteTopic': cardinality.Cardinality.UNARY_UNARY, + 'GetTopic': cardinality.Cardinality.UNARY_UNARY, + 'ListTopicSubscriptions': cardinality.Cardinality.UNARY_UNARY, + 'ListTopics': cardinality.Cardinality.UNARY_UNARY, + 'Publish': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = 
beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Publisher', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py new file mode 100644 index 000000000000..5a970cbc77ab --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py @@ -0,0 +1,461 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.pubsub.v1.pubsub_pb2 as google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class SubscriberStub(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.GetSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.UpdateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.ListSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, + ) + self.DeleteSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyAckDeadline = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + 
self.Acknowledge = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Pull = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.FromString, + ) + self.StreamingPull = channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.FromString, + ) + self.ModifyPushConfig = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListSnapshots = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.FromString, + ) + self.CreateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + ) + self.DeleteSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + 
request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Seek = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.FromString, + ) + + +class SubscriberServicer(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. 
Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SubscriberServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'GetSubscription': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, + ), + 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Acknowledge': grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Pull': grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.SerializeToString, + ), + 'StreamingPull': grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, + ), + 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, + ), + 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.FromString, + 
response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Seek': grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Subscriber', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class PublisherStub(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class PublisherServicer(object): + """The service that an application uses to 
manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.SerializeToString, + ), + 'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) From 077141612359db7ff19adb20ad032b1bbe4dc947 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 1 Jun 2017 10:09:36 -0700 Subject: [PATCH 14/86] Do GAPIC changeout on base branch. --- pubsub/google/cloud/pubsub/__init__.py | 34 - pubsub/google/cloud/pubsub/_gax.py | 796 --------------------- pubsub/google/cloud/pubsub/_helpers.py | 73 -- pubsub/google/cloud/pubsub/_http.py | 782 -------------------- pubsub/google/cloud/pubsub/client.py | 283 -------- pubsub/google/cloud/pubsub/iam.py | 138 ---- pubsub/google/cloud/pubsub/message.py | 91 --- pubsub/google/cloud/pubsub/snapshot.py | 140 ---- pubsub/google/cloud/pubsub/subscription.py | 590 --------------- pubsub/google/cloud/pubsub/topic.py | 551 -------------- 10 files changed, 3478 deletions(-) delete mode 100644 pubsub/google/cloud/pubsub/__init__.py delete mode 100644 pubsub/google/cloud/pubsub/_gax.py delete mode 100644 pubsub/google/cloud/pubsub/_helpers.py delete mode 100644 pubsub/google/cloud/pubsub/_http.py delete mode 100644 pubsub/google/cloud/pubsub/client.py delete mode 100644 pubsub/google/cloud/pubsub/iam.py delete mode 100644 pubsub/google/cloud/pubsub/message.py delete mode 100644 pubsub/google/cloud/pubsub/snapshot.py delete mode 100644 pubsub/google/cloud/pubsub/subscription.py delete mode 100644 pubsub/google/cloud/pubsub/topic.py diff --git a/pubsub/google/cloud/pubsub/__init__.py b/pubsub/google/cloud/pubsub/__init__.py deleted file mode 100644 index 070e8243bf2b..000000000000 --- a/pubsub/google/cloud/pubsub/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2015 Google Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Google Cloud Pubsub API wrapper. - -The main concepts with this API are: - -- :class:`~google.cloud.pubsub.topic.Topic` represents an endpoint to which - messages can be published using the Cloud Storage Pubsub API. - -- :class:`~google.cloud.pubsub.subscription.Subscription` represents a named - subscription (either pull or push) to a topic. -""" - - -from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-pubsub').version - -from google.cloud.pubsub.client import Client -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -__all__ = ['__version__', 'Client', 'Subscription', 'Topic'] diff --git a/pubsub/google/cloud/pubsub/_gax.py b/pubsub/google/cloud/pubsub/_gax.py deleted file mode 100644 index 730192755221..000000000000 --- a/pubsub/google/cloud/pubsub/_gax.py +++ /dev/null @@ -1,796 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""GAX wrapper for Pubsub API requests.""" - -import functools - -from google.cloud.gapic.pubsub.v1.publisher_client import PublisherClient -from google.cloud.gapic.pubsub.v1.subscriber_client import SubscriberClient -from google.gax import CallOptions -from google.gax import INITIAL_PAGE -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from google.protobuf.json_format import MessageToDict -from google.cloud.proto.pubsub.v1.pubsub_pb2 import PubsubMessage -from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig -from grpc import insecure_channel -from grpc import StatusCode - -from google.cloud._helpers import _to_bytes -from google.cloud._helpers import _pb_timestamp_to_rfc3339 -from google.cloud._helpers import _timedelta_to_duration_pb -from google.cloud._helpers import make_secure_channel -from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud.exceptions import Conflict -from google.cloud.exceptions import NotFound -from google.cloud.iterator import GAXIterator -from google.cloud.pubsub import __version__ -from google.cloud.pubsub._helpers import subscription_name_from_path -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - - -class _PublisherAPI(object): - """Helper mapping publisher-related APIs. - - :type gax_api: :class:`.publisher_client.PublisherClient` - :param gax_api: API object used to make GAX requests. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gax_api, client): - self._gax_api = gax_api - self._client = client - - def list_topics(self, project, page_size=0, page_token=None): - """List topics for the project associated with this API. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_topics( - path, page_size=page_size, options=options) - return GAXIterator(self._client, page_iter, _item_to_topic) - - def topic_create(self, topic_path): - """API call: create a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - :type topic_path: str - :param topic_path: fully-qualified path of the new topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.Conflict` if the topic already - exists - """ - try: - topic_pb = self._gax_api.create_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - raise Conflict(topic_path) - raise - return {'name': topic_pb.name} - - def topic_get(self, topic_path): - """API call: retrieve a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. 
- :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist - """ - try: - topic_pb = self._gax_api.get_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - return {'name': topic_pb.name} - - def topic_delete(self, topic_path): - """API call: delete a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - """ - try: - self._gax_api.delete_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - - def topic_publish(self, topic_path, messages, timeout=30): - """API call: publish one or more messages to a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - - :type messages: list of dict - :param messages: messages to be published. - - :type timeout: int - :param timeout: (Optional) Timeout seconds. - - :rtype: list of string - :returns: list of opaque IDs for published messages. 
- :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist - """ - options = CallOptions(is_bundling=False, timeout=timeout) - message_pbs = [_message_pb_from_mapping(message) - for message in messages] - try: - result = self._gax_api.publish(topic_path, message_pbs, - options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - return result.message_ids - - def topic_list_subscriptions(self, topic, page_size=0, page_token=None): - """API call: list subscriptions bound to a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: The topic that owns the subscriptions. - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - :raises: :exc:`~google.cloud.exceptions.NotFound` if the topic does - not exist. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - topic_path = topic.full_name - try: - page_iter = self._gax_api.list_topic_subscriptions( - topic_path, page_size=page_size, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - - iterator = GAXIterator(self._client, page_iter, - _item_to_subscription_for_topic) - iterator.topic = topic - return iterator - - -class _SubscriberAPI(object): - """Helper mapping subscriber-related APIs. 
- - :type gax_api: :class:`.publisher_client.SubscriberClient` - :param gax_api: API object used to make GAX requests. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns this API object. - """ - def __init__(self, gax_api, client): - self._gax_api = gax_api - self._client = client - - def list_subscriptions(self, project, page_size=0, page_token=None): - """List subscriptions for the project associated with this API. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_subscriptions( - path, page_size=page_size, options=options) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Subscription.from_api_repr, they - # can be re-used by other subscriptions from the same topic. 
- topics = {} - item_to_value = functools.partial( - _item_to_sub_for_client, topics=topics) - return GAXIterator(self._client, page_iter, item_to_value) - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """API call: create a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic being - subscribed, in format - ``projects//topics/``. - - :type ack_deadline: int - :param ack_deadline: - (Optional) the deadline (in seconds) by which messages pulled from - the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. 
- """ - if push_endpoint is not None: - push_config = PushConfig(push_endpoint=push_endpoint) - else: - push_config = None - - if message_retention_duration is not None: - message_retention_duration = _timedelta_to_duration_pb( - message_retention_duration) - - try: - sub_pb = self._gax_api.create_subscription( - subscription_path, topic_path, - push_config=push_config, ack_deadline_seconds=ack_deadline, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - raise Conflict(topic_path) - raise - return MessageToDict(sub_pb) - - def subscription_get(self, subscription_path): - """API call: retrieve a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - try: - sub_pb = self._gax_api.get_subscription(subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - return MessageToDict(sub_pb) - - def subscription_delete(self, subscription_path): - """API call: delete a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. 
- """ - try: - self._gax_api.delete_subscription(subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_modify_push_config(self, subscription_path, - push_endpoint): - """API call: update push config of a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - """ - push_config = PushConfig(push_endpoint=push_endpoint) - try: - self._gax_api.modify_push_config(subscription_path, push_config) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_pull(self, subscription_path, return_immediately=False, - max_messages=1): - """API call: retrieve messages for a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to pull from, in - format ``projects//subscriptions/``. - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :rtype: list of dict - :returns: the ``receivedMessages`` element of the response. 
- """ - try: - response_pb = self._gax_api.pull( - subscription_path, max_messages, - return_immediately=return_immediately) - except GaxError as exc: - code = exc_to_code(exc.cause) - if code == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - elif code == StatusCode.DEADLINE_EXCEEDED: - # NOTE: The JSON-over-HTTP API returns a 200 with an empty - # response when ``return_immediately`` is ``False``, so - # we "mutate" the gRPC error into a non-error to conform. - if not return_immediately: - return [] - raise - return [_received_message_pb_to_mapping(rmpb) - for rmpb in response_pb.received_messages] - - def subscription_acknowledge(self, subscription_path, ack_ids): - """API call: acknowledge retrieved messages - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - """ - try: - self._gax_api.acknowledge(subscription_path, ack_ids) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - """API call: update ack deadline for retrieved messages - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. 
- """ - try: - self._gax_api.modify_ack_deadline( - subscription_path, ack_ids, ack_deadline) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - """API call: seek a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type subscription_path: str - :param subscription_path:: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type time: :class:`.timestamp_pb2.Timestamp` - :param time: The time to seek to. - - :type snapshot: str - :param snapshot: The snapshot to seek to. - """ - try: - self._gax_api.seek(subscription_path, time=time, snapshot=snapshot) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def list_snapshots(self, project, page_size=0, page_token=None): - """List snapshots for the project associated with this API. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. 
- """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_snapshots( - path, page_size=page_size, options=options) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Snapshot.from_api_repr, they - # can be re-used by other snapshots of the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_snapshot_for_client, topics=topics) - return GAXIterator(self._client, page_iter, item_to_value) - - def snapshot_create(self, snapshot_path, subscription_path): - """API call: create a snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :type subscription_path: str - :param subscription_path: fully-qualified path of the subscrption that - the new snapshot captures, in format - ``projects//subscription/``. - - :rtype: dict - :returns: ``Snapshot`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.Conflict` if the snapshot - already exists - :raises: :exc:`google.cloud.exceptions.NotFound` if the subscription - does not exist - """ - try: - snapshot_pb = self._gax_api.create_snapshot( - snapshot_path, subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: - raise Conflict(snapshot_path) - elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - return MessageToDict(snapshot_pb) - - def snapshot_delete(self, snapshot_path): - """API call: delete a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. 
- - :raises: :exc:`google.cloud.exceptions.NotFound` if the snapshot does - not exist - """ - try: - self._gax_api.delete_snapshot(snapshot_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(snapshot_path) - raise - - -def _message_pb_from_mapping(message): - """Helper for :meth:`_PublisherAPI.topic_publish`. - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return PubsubMessage(data=_to_bytes(message['data']), - attributes=message['attributes']) - - -def _message_pb_to_mapping(message_pb): - """Helper for :meth:`pull`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return { - 'messageId': message_pb.message_id, - 'data': message_pb.data, - 'attributes': message_pb.attributes, - 'publishTime': _pb_timestamp_to_rfc3339(message_pb.publish_time), - } - - -def _received_message_pb_to_mapping(received_message_pb): - """Helper for :meth:`pull`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return { - 'ackId': received_message_pb.ack_id, - 'message': _message_pb_to_mapping( - received_message_pb.message), - } - - -def make_gax_publisher_api(credentials=None, host=None): - """Create an instance of the GAX Publisher API. - - If the ``credentials`` are omitted, then we create an insecure - ``channel`` pointing at the local Pub / Sub emulator. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) Credentials for getting access - tokens. - - :type host: str - :param host: (Optional) The host for an insecure channel. Only - used if ``credentials`` are omitted. - - :rtype: :class:`.publisher_client.PublisherClient` - :returns: A publisher API instance with the proper channel. 
- """ - if credentials is None: - channel = insecure_channel(host) - else: - channel = make_secure_channel( - credentials, DEFAULT_USER_AGENT, - PublisherClient.SERVICE_ADDRESS) - return PublisherClient( - channel=channel, lib_name='gccl', lib_version=__version__) - - -def make_gax_subscriber_api(credentials=None, host=None): - """Create an instance of the GAX Subscriber API. - - If the ``credentials`` are omitted, then we create an insecure - ``channel`` pointing at the local Pub / Sub emulator. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) Credentials for getting access - tokens. - - :type host: str - :param host: (Optional) The host for an insecure channel. Only - used if ``credentials`` are omitted. - - :rtype: :class:`.subscriber_client.SubscriberClient` - :returns: A subscriber API instance with the proper channel. - """ - if credentials is None: - channel = insecure_channel(host) - else: - channel = make_secure_channel( - credentials, DEFAULT_USER_AGENT, - SubscriberClient.SERVICE_ADDRESS) - return SubscriberClient( - channel=channel, lib_name='gccl', lib_version=__version__) - - -def _item_to_topic(iterator, resource): - """Convert a protobuf topic to the native object. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: :class:`.pubsub_pb2.Topic` - :param resource: A topic returned from the API. - - :rtype: :class:`~google.cloud.pubsub.topic.Topic` - :returns: The next topic in the page. - """ - return Topic.from_api_repr( - {'name': resource.name}, iterator.client) - - -def _item_to_subscription_for_topic(iterator, subscription_path): - """Convert a subscription name to the native object. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type subscription_path: str - :param subscription_path: Subscription path returned from the API. 
- - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - subscription_name = subscription_name_from_path( - subscription_path, iterator.client.project) - return Subscription(subscription_name, iterator.topic) - - -def _item_to_sub_for_client(iterator, sub_pb, topics): - """Convert a subscription protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_subscriptions`. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type sub_pb: :class:`.pubsub_pb2.Subscription` - :param sub_pb: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - resource = MessageToDict(sub_pb) - return Subscription.from_api_repr( - resource, iterator.client, topics=topics) - - -def _item_to_snapshot_for_client(iterator, snapshot_pb, topics): - """Convert a subscription protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_snapshots`. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. 
- - :type sub_pb: :class:`.pubsub_pb2.Snapshot` - :param sub_pb: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - resource = MessageToDict(snapshot_pb) - return Snapshot.from_api_repr( - resource, iterator.client, topics=topics) diff --git a/pubsub/google/cloud/pubsub/_helpers.py b/pubsub/google/cloud/pubsub/_helpers.py deleted file mode 100644 index 2f021f20ab3e..000000000000 --- a/pubsub/google/cloud/pubsub/_helpers.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helper functions for shared behavior.""" - -import re - -from google.cloud._helpers import _name_from_project_path - - -_TOPIC_TEMPLATE = re.compile(r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /topics/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) - - -_SUBSCRIPTION_TEMPLATE = re.compile(r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /subscriptions/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) - - -def topic_name_from_path(path, project): - """Validate a topic URI path and get the topic name. 
- - :type path: str - :param path: URI path for a topic API request. - - :type project: str - :param project: The project associated with the request. It is - included for validation purposes. - - :rtype: str - :returns: Topic name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. - """ - return _name_from_project_path(path, project, _TOPIC_TEMPLATE) - - -def subscription_name_from_path(path, project): - """Validate a subscription URI path and get the subscription name. - - :type path: str - :param path: URI path for a subscription API request. - - :type project: str - :param project: The project associated with the request. It is - included for validation purposes. - - :rtype: str - :returns: subscription name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. - """ - return _name_from_project_path(path, project, _SUBSCRIPTION_TEMPLATE) diff --git a/pubsub/google/cloud/pubsub/_http.py b/pubsub/google/cloud/pubsub/_http.py deleted file mode 100644 index 0c059df7453a..000000000000 --- a/pubsub/google/cloud/pubsub/_http.py +++ /dev/null @@ -1,782 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Interact with Google Cloud Pub/Sub via JSON-over-HTTP.""" - -import base64 -import copy -import functools -import os - -from google.cloud import _http -from google.cloud._helpers import _timedelta_to_duration_pb -from google.cloud.environment_vars import PUBSUB_EMULATOR -from google.cloud.iterator import HTTPIterator - -from google.cloud.pubsub import __version__ -from google.cloud.pubsub._helpers import subscription_name_from_path -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - - -PUBSUB_API_HOST = 'pubsub.googleapis.com' -"""Pub / Sub API request host.""" - -_CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) - - -class Connection(_http.JSONConnection): - """A connection to Google Cloud Pub/Sub via the JSON REST API. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns the current connection. - """ - - API_BASE_URL = 'https://' + PUBSUB_API_HOST - """The base of the API call URL.""" - - API_VERSION = 'v1' - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' - """A template for the URL of a particular API call.""" - - _EXTRA_HEADERS = { - _http.CLIENT_INFO_HEADER: _CLIENT_INFO, - } - - def __init__(self, client): - super(Connection, self).__init__(client) - emulator_host = os.getenv(PUBSUB_EMULATOR) - if emulator_host is None: - self.host = self.__class__.API_BASE_URL - self.api_base_url = self.__class__.API_BASE_URL - self.in_emulator = False - else: - self.host = emulator_host - self.api_base_url = 'http://' + emulator_host - self.in_emulator = True - - def build_api_url(self, path, query_params=None, - api_base_url=None, api_version=None): - """Construct an API url given a few components, some optional. - - Typically, you shouldn't need to use this method. - - :type path: str - :param path: The path to the resource. 
- - :type query_params: dict or list - :param query_params: A dictionary of keys and values (or list of - key-value pairs) to insert into the query - string of the URL. - - :type api_base_url: str - :param api_base_url: The base URL for the API endpoint. - Typically you won't have to provide this. - - :type api_version: str - :param api_version: The version of the API to call. - Typically you shouldn't provide this and instead - use the default for the library. - - :rtype: str - :returns: The URL assembled from the pieces provided. - """ - if api_base_url is None: - api_base_url = self.api_base_url - return super(Connection, self.__class__).build_api_url( - path, query_params=query_params, - api_base_url=api_base_url, api_version=api_version) - - -class _PublisherAPI(object): - """Helper mapping publisher-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_topics(self, project, page_size=None, page_token=None): - """API call: list topics for a given project - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current client. 
- """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/topics' % (project,) - - return HTTPIterator( - client=self._client, path=path, item_to_value=_item_to_topic, - items_key='topics', page_token=page_token, - extra_params=extra_params) - - def topic_create(self, topic_path): - """API call: create a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - :type topic_path: str - :param topic_path: the fully-qualified path of the new topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - """ - return self.api_request(method='PUT', path='/%s' % (topic_path,)) - - def topic_get(self, topic_path): - """API call: retrieve a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - """ - return self.api_request(method='GET', path='/%s' % (topic_path,)) - - def topic_delete(self, topic_path): - """API call: delete a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - """ - self.api_request(method='DELETE', path='/%s' % (topic_path,)) - - def topic_publish(self, topic_path, messages): - """API call: publish one or more messages to a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - - :type messages: list of dict - :param messages: messages to be published. - - :rtype: list of string - :returns: list of opaque IDs for published messages. 
- """ - messages_to_send = copy.deepcopy(messages) - _transform_messages_base64(messages_to_send, _base64_unicode) - data = {'messages': messages_to_send} - response = self.api_request( - method='POST', path='/%s:publish' % (topic_path,), data=data) - return response['messageIds'] - - def topic_list_subscriptions(self, topic, page_size=None, page_token=None): - """API call: list subscriptions bound to a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: The topic that owns the subscriptions. - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: list of strings - :returns: fully-qualified names of subscriptions for the supplied - topic. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/%s/subscriptions' % (topic.full_name,) - - iterator = HTTPIterator( - client=self._client, path=path, - item_to_value=_item_to_subscription_for_topic, - items_key='subscriptions', - page_token=page_token, extra_params=extra_params) - iterator.topic = topic - return iterator - - -class _SubscriberAPI(object): - """Helper mapping subscriber-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. 
- """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_subscriptions(self, project, page_size=None, page_token=None): - """API call: list subscriptions for a given project - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/subscriptions' % (project,) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Subscription.from_api_repr, they - # can be re-used by other subscriptions from the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_sub_for_client, topics=topics) - return HTTPIterator( - client=self._client, path=path, item_to_value=item_to_value, - items_key='subscriptions', page_token=page_token, - extra_params=extra_params) - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """API call: create a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type topic_path: str - :param topic_path: the fully-qualified path of the topic being - subscribed, in format - ``projects//topics/``. - - :type ack_deadline: int - :param ack_deadline: - (Optional) the deadline (in seconds) by which messages pulled from - the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. 
- """ - path = '/%s' % (subscription_path,) - resource = {'topic': topic_path} - - if ack_deadline is not None: - resource['ackDeadlineSeconds'] = ack_deadline - - if push_endpoint is not None: - resource['pushConfig'] = {'pushEndpoint': push_endpoint} - - if retain_acked_messages is not None: - resource['retainAckedMessages'] = retain_acked_messages - - if message_retention_duration is not None: - pb = _timedelta_to_duration_pb(message_retention_duration) - resource['messageRetentionDuration'] = { - 'seconds': pb.seconds, - 'nanos': pb.nanos - } - - return self.api_request(method='PUT', path=path, data=resource) - - def subscription_get(self, subscription_path): - """API call: retrieve a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - path = '/%s' % (subscription_path,) - return self.api_request(method='GET', path=path) - - def subscription_delete(self, subscription_path): - """API call: delete a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - """ - path = '/%s' % (subscription_path,) - self.api_request(method='DELETE', path=path) - - def subscription_modify_push_config(self, subscription_path, - push_endpoint): - """API call: update push config of a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - """ - path = '/%s:modifyPushConfig' % (subscription_path,) - resource = {'pushConfig': {'pushEndpoint': push_endpoint}} - self.api_request(method='POST', path=path, data=resource) - - def subscription_pull(self, subscription_path, return_immediately=False, - max_messages=1): - """API call: retrieve messages for a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :rtype: list of dict - :returns: the ``receivedMessages`` element of the response. - """ - path = '/%s:pull' % (subscription_path,) - data = { - 'returnImmediately': return_immediately, - 'maxMessages': max_messages, - } - response = self.api_request(method='POST', path=path, data=data) - messages = response.get('receivedMessages', ()) - _transform_messages_base64(messages, base64.b64decode, 'message') - return messages - - def subscription_acknowledge(self, subscription_path, ack_ids): - """API call: acknowledge retrieved messages - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - """ - path = '/%s:acknowledge' % (subscription_path,) - data = { - 'ackIds': ack_ids, - } - self.api_request(method='POST', path=path, data=data) - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - """API call: update ack deadline for retrieved messages - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - """ - path = '/%s:modifyAckDeadline' % (subscription_path,) - data = { - 'ackIds': ack_ids, - 'ackDeadlineSeconds': ack_deadline, - } - self.api_request(method='POST', path=path, data=data) - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - """API call: seek a subscription - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type subscription_path: str - :param subscription_path:: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type time: str - :param time: The time to seek to, in RFC 3339 format. - - :type snapshot: str - :param snapshot: The snapshot to seek to. - """ - path = '/%s:seek' % (subscription_path,) - data = {} - if time is not None: - data['time'] = time - if snapshot is not None: - data['snapshot'] = snapshot - self.api_request(method='POST', path=path, data=data) - - def list_snapshots(self, project, page_size=None, page_token=None): - """List snapshots for the project associated with this API. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/snapshots' % (project,) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Snapshot.from_api_repr, they - # can be re-used by other snapshots of the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_snapshot_for_client, topics=topics) - return HTTPIterator( - client=self._client, path=path, item_to_value=item_to_value, - items_key='snapshots', page_token=page_token, - extra_params=extra_params) - - def snapshot_create(self, snapshot_path, subscription_path): - """API call: create a snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :type subscription_path: str - :param subscription_path: fully-qualified path of the subscrption that - the new snapshot captures, in format - ``projects//subscription/``. - - :rtype: dict - :returns: ``Snapshot`` resource returned from the API. 
- """ - path = '/%s' % (snapshot_path,) - data = {'subscription': subscription_path} - return self.api_request(method='PUT', path=path, data=data) - - def snapshot_delete(self, snapshot_path): - """API call: delete a topic - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - """ - path = '/%s' % (snapshot_path,) - self.api_request(method='DELETE', path=path) - - -class _IAMPolicyAPI(object): - """Helper mapping IAM policy-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self.api_request = client._connection.api_request - - def get_iam_policy(self, target_path): - """API call: fetch the IAM policy for the target - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - - :type target_path: str - :param target_path: the path of the target object. - - :rtype: dict - :returns: the resource returned by the ``getIamPolicy`` API request. - """ - path = '/%s:getIamPolicy' % (target_path,) - return self.api_request(method='GET', path=path) - - def set_iam_policy(self, target_path, policy): - """API call: update the IAM policy for the target - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - - :type target_path: str - :param target_path: the path of the target object. - - :type policy: dict - :param policy: the new policy resource. - - :rtype: dict - :returns: the resource returned by the ``setIamPolicy`` API request. 
- """ - wrapped = {'policy': policy} - path = '/%s:setIamPolicy' % (target_path,) - return self.api_request(method='POST', path=path, data=wrapped) - - def test_iam_permissions(self, target_path, permissions): - """API call: test permissions - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - - :type target_path: str - :param target_path: the path of the target object. - - :type permissions: list of string - :param permissions: the permissions to check - - :rtype: dict - :returns: the resource returned by the ``getIamPolicy`` API request. - """ - wrapped = {'permissions': permissions} - path = '/%s:testIamPermissions' % (target_path,) - resp = self.api_request(method='POST', path=path, data=wrapped) - return resp.get('permissions', []) - - -def _base64_unicode(value): - """Helper to base64 encode and make JSON serializable. - - :type value: str - :param value: String value to be base64 encoded and made serializable. - - :rtype: str - :returns: Base64 encoded string/unicode value. - """ - as_bytes = base64.b64encode(value) - return as_bytes.decode('ascii') - - -def _transform_messages_base64(messages, transform, key=None): - """Helper for base64 encoding and decoding messages. - - :type messages: list - :param messages: List of dictionaries with message data. - - :type transform: :class:`~types.FunctionType` - :param transform: Function to encode/decode the message data. - - :type key: str - :param key: Index to access messages. - """ - for message in messages: - if key is not None: - message = message[key] - if 'data' in message: - message['data'] = transform(message['data']) - - -def _item_to_topic(iterator, resource): - """Convert a JSON topic to the native object. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. 
- - :type resource: dict - :param resource: A topic returned from the API. - - :rtype: :class:`~google.cloud.pubsub.topic.Topic` - :returns: The next topic in the page. - """ - return Topic.from_api_repr(resource, iterator.client) - - -def _item_to_subscription_for_topic(iterator, subscription_path): - """Convert a subscription name to the native object. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type subscription_path: str - :param subscription_path: Subscription path returned from the API. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - subscription_name = subscription_name_from_path( - subscription_path, iterator.client.project) - return Subscription(subscription_name, iterator.topic) - - -def _item_to_sub_for_client(iterator, resource, topics): - """Convert a subscription to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_subscriptions`. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - return Subscription.from_api_repr( - resource, iterator.client, topics=topics) - - -def _item_to_snapshot_for_client(iterator, resource, topics): - """Convert a subscription to the native object. - - .. 
note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_snapshots`. - - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - return Snapshot.from_api_repr( - resource, iterator.client, topics=topics) diff --git a/pubsub/google/cloud/pubsub/client.py b/pubsub/google/cloud/pubsub/client.py deleted file mode 100644 index 17bb67cb66e2..000000000000 --- a/pubsub/google/cloud/pubsub/client.py +++ /dev/null @@ -1,283 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Client for interacting with the Google Cloud Pub/Sub API.""" - -import os - -from google.cloud.client import ClientWithProject -from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.pubsub._http import Connection -from google.cloud.pubsub._http import _PublisherAPI as JSONPublisherAPI -from google.cloud.pubsub._http import _SubscriberAPI as JSONSubscriberAPI -from google.cloud.pubsub._http import _IAMPolicyAPI -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -try: - from google.cloud.pubsub._gax import _PublisherAPI as GAXPublisherAPI - from google.cloud.pubsub._gax import _SubscriberAPI as GAXSubscriberAPI - from google.cloud.pubsub._gax import make_gax_publisher_api - from google.cloud.pubsub._gax import make_gax_subscriber_api -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - GAXPublisherAPI = None - GAXSubscriberAPI = None - make_gax_publisher_api = None - make_gax_subscriber_api = None -else: - _HAVE_GRPC = True - - -_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) -_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC - - -class Client(ClientWithProject): - """Client to bundle configuration needed for API requests. - - :type project: str - :param project: the project which the client acts on behalf of. Will be - passed when creating a topic. If not passed, - falls back to the default inferred from the environment. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``_http`` object is - passed), falls back to the default inferred from the - environment. - - :type _http: :class:`~httplib2.Http` - :param _http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. 
If not passed, an - ``_http`` object is created that is bound to the - ``credentials`` for the current object. - This parameter should be considered private, and could - change in the future. - - :type _use_grpc: bool - :param _use_grpc: (Optional) Explicitly specifies whether - to use the gRPC transport (via GAX) or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` - environment variable. - This parameter should be considered private, and could - change in the future. - """ - - _publisher_api = None - _subscriber_api = None - _iam_policy_api = None - - SCOPE = ('https://www.googleapis.com/auth/pubsub', - 'https://www.googleapis.com/auth/cloud-platform') - """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" - - def __init__(self, project=None, credentials=None, - _http=None, _use_grpc=None): - super(Client, self).__init__( - project=project, credentials=credentials, _http=_http) - self._connection = Connection(self) - if _use_grpc is None: - self._use_grpc = _USE_GRPC - else: - self._use_grpc = _use_grpc - - @property - def publisher_api(self): - """Helper for publisher-related API calls.""" - if self._publisher_api is None: - if self._use_grpc: - if self._connection.in_emulator: - generated = make_gax_publisher_api( - host=self._connection.host) - else: - generated = make_gax_publisher_api( - credentials=self._credentials) - self._publisher_api = GAXPublisherAPI(generated, self) - else: - self._publisher_api = JSONPublisherAPI(self) - return self._publisher_api - - @property - def subscriber_api(self): - """Helper for subscriber-related API calls.""" - if self._subscriber_api is None: - if self._use_grpc: - if self._connection.in_emulator: - generated = make_gax_subscriber_api( - host=self._connection.host) - else: - generated = make_gax_subscriber_api( - credentials=self._credentials) - self._subscriber_api = GAXSubscriberAPI(generated, self) - else: - self._subscriber_api = JSONSubscriberAPI(self) - return 
self._subscriber_api - - @property - def iam_policy_api(self): - """Helper for IAM policy-related API calls.""" - if self._iam_policy_api is None: - self._iam_policy_api = _IAMPolicyAPI(self) - return self._iam_policy_api - - def list_topics(self, page_size=None, page_token=None): - """List topics for the project associated with this client. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_list_topics] - :end-before: [END client_list_topics] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current API. - """ - api = self.publisher_api - return api.list_topics( - self.project, page_size, page_token) - - def list_subscriptions(self, page_size=None, page_token=None): - """List subscriptions for the project associated with this client. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_list_subscriptions] - :end-before: [END client_list_subscriptions] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current client. 
- """ - api = self.subscriber_api - return api.list_subscriptions( - self.project, page_size, page_token) - - def list_snapshots(self, page_size=None, page_token=None): - """List snapshots for the project associated with this API. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - api = self.subscriber_api - return api.list_snapshots( - self.project, page_size, page_token) - - def topic(self, name, timestamp_messages=False): - """Creates a topic bound to the current client. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_topic] - :end-before: [END client_topic] - - :type name: str - :param name: the name of the topic to be constructed. - - :type timestamp_messages: bool - :param timestamp_messages: To be passed to ``Topic`` constructor. - - :rtype: :class:`google.cloud.pubsub.topic.Topic` - :returns: Topic created with the current client. - """ - return Topic(name, client=self, timestamp_messages=timestamp_messages) - - def subscription(self, name, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """Creates a subscription bound to the current client. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_subscription] - :end-before: [END client_subscription] - - :type name: str - :param name: the name of the subscription to be constructed. 
- - :type ack_deadline: int - :param ack_deadline: (Optional) The deadline (in seconds) by which - messages pulledfrom the back-end must be - acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by ``message_retention_duration``. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by ``message_retention_duration``. If unset, defaults to 7 days. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: Subscription created with the current client. - """ - return Subscription( - name, ack_deadline=ack_deadline, push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, client=self) diff --git a/pubsub/google/cloud/pubsub/iam.py b/pubsub/google/cloud/pubsub/iam.py deleted file mode 100644 index 9c7e46af222a..000000000000 --- a/pubsub/google/cloud/pubsub/iam.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""PubSub API IAM policy definitions - -For allowed roles / permissions, see: -https://cloud.google.com/pubsub/access_control#permissions -""" - -import warnings - -# pylint: disable=unused-import -from google.cloud.iam import OWNER_ROLE # noqa - backward compat -from google.cloud.iam import EDITOR_ROLE # noqa - backward compat -from google.cloud.iam import VIEWER_ROLE # noqa - backward compat -# pylint: enable=unused-import -from google.cloud.iam import Policy as _BasePolicy -from google.cloud.iam import _ASSIGNMENT_DEPRECATED_MSG - -# Pubsub-specific IAM roles - -PUBSUB_ADMIN_ROLE = 'roles/pubsub.admin' -"""Role implying all rights to an object.""" - -PUBSUB_EDITOR_ROLE = 'roles/pubsub.editor' -"""Role implying rights to modify an object.""" - -PUBSUB_VIEWER_ROLE = 'roles/pubsub.viewer' -"""Role implying rights to access an object.""" - -PUBSUB_PUBLISHER_ROLE = 'roles/pubsub.publisher' -"""Role implying rights to publish to a topic.""" - -PUBSUB_SUBSCRIBER_ROLE = 'roles/pubsub.subscriber' -"""Role implying rights to subscribe to a topic.""" - - -# Pubsub-specific permissions - -PUBSUB_TOPICS_CONSUME = 'pubsub.topics.consume' -"""Permission: consume events from a subscription.""" - -PUBSUB_TOPICS_CREATE = 'pubsub.topics.create' -"""Permission: create topics.""" - -PUBSUB_TOPICS_DELETE = 'pubsub.topics.delete' -"""Permission: delete topics.""" - -PUBSUB_TOPICS_GET = 'pubsub.topics.get' -"""Permission: retrieve topics.""" - -PUBSUB_TOPICS_GET_IAM_POLICY = 'pubsub.topics.getIamPolicy' -"""Permission: retrieve subscription IAM policies.""" - -PUBSUB_TOPICS_LIST = 'pubsub.topics.list' -"""Permission: list topics.""" - -PUBSUB_TOPICS_SET_IAM_POLICY = 'pubsub.topics.setIamPolicy' -"""Permission: update subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_CONSUME = 'pubsub.subscriptions.consume' -"""Permission: consume events from a subscription.""" - 
-PUBSUB_SUBSCRIPTIONS_CREATE = 'pubsub.subscriptions.create' -"""Permission: create subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_DELETE = 'pubsub.subscriptions.delete' -"""Permission: delete subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_GET = 'pubsub.subscriptions.get' -"""Permission: retrieve subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY = 'pubsub.subscriptions.getIamPolicy' -"""Permission: retrieve subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_LIST = 'pubsub.subscriptions.list' -"""Permission: list subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_SET_IAM_POLICY = 'pubsub.subscriptions.setIamPolicy' -"""Permission: update subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_UPDATE = 'pubsub.subscriptions.update' -"""Permission: update subscriptions.""" - - -class Policy(_BasePolicy): - """IAM Policy / Bindings. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy - https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Binding - """ - _OWNER_ROLES = (OWNER_ROLE, PUBSUB_ADMIN_ROLE) - """Roles mapped onto our ``owners`` attribute.""" - - _EDITOR_ROLES = (EDITOR_ROLE, PUBSUB_EDITOR_ROLE) - """Roles mapped onto our ``editors`` attribute.""" - - _VIEWER_ROLES = (VIEWER_ROLE, PUBSUB_VIEWER_ROLE) - """Roles mapped onto our ``viewers`` attribute.""" - - @property - def publishers(self): - """Legacy access to owner role.""" - return frozenset(self._bindings.get(PUBSUB_PUBLISHER_ROLE, ())) - - @publishers.setter - def publishers(self, value): - """Update publishers.""" - warnings.warn( - _ASSIGNMENT_DEPRECATED_MSG.format( - 'publishers', PUBSUB_PUBLISHER_ROLE), - DeprecationWarning) - self[PUBSUB_PUBLISHER_ROLE] = value - - @property - def subscribers(self): - """Legacy access to owner role.""" - return frozenset(self._bindings.get(PUBSUB_SUBSCRIBER_ROLE, ())) - - @subscribers.setter - def subscribers(self, value): - """Update subscribers.""" - warnings.warn( - _ASSIGNMENT_DEPRECATED_MSG.format( - 'subscribers', 
PUBSUB_SUBSCRIBER_ROLE), - DeprecationWarning) - self[PUBSUB_SUBSCRIBER_ROLE] = value diff --git a/pubsub/google/cloud/pubsub/message.py b/pubsub/google/cloud/pubsub/message.py deleted file mode 100644 index 6b93e3b890ed..000000000000 --- a/pubsub/google/cloud/pubsub/message.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Topics.""" - -from google.cloud._helpers import _rfc3339_to_datetime - - -class Message(object): - """Messages can be published to a topic and received by subscribers. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage - - :type data: bytes - :param data: the payload of the message. - - :type message_id: str - :param message_id: An ID assigned to the message by the API. - - :type attributes: dict - :param attributes: - (Optional) Extra metadata associated by the publisher with the message. - """ - _service_timestamp = None - - def __init__(self, data, message_id, attributes=None): - self.data = data - self.message_id = message_id - self._attributes = attributes - - @property - def attributes(self): - """Lazily-constructed attribute dictionary.""" - if self._attributes is None: - self._attributes = {} - return self._attributes - - @property - def timestamp(self): - """Return sortable timestamp from attributes, if passed. - - Allows sorting messages in publication order (assuming consistent - clocks across all publishers). 
- - :rtype: :class:`datetime.datetime` - :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp - :raises: ValueError if timestamp not in ``attributes``, or if it does - not match the RFC 3339 format. - """ - stamp = self.attributes.get('timestamp') - if stamp is None: - raise ValueError('No timestamp') - return _rfc3339_to_datetime(stamp) - - @property - def service_timestamp(self): - """Return server-set timestamp. - - :rtype: str - :returns: timestamp (in UTC timezone) in RFC 3339 format - """ - return self._service_timestamp - - @classmethod - def from_api_repr(cls, api_repr): - """Factory: construct message from API representation. - - :type api_repr: dict - :param api_repr: (Optional) The API representation of the message - - :rtype: :class:`Message` - :returns: The message created from the response. - """ - data = api_repr.get('data', b'') - instance = cls( - data=data, message_id=api_repr['messageId'], - attributes=api_repr.get('attributes')) - instance._service_timestamp = api_repr.get('publishTime') - return instance diff --git a/pubsub/google/cloud/pubsub/snapshot.py b/pubsub/google/cloud/pubsub/snapshot.py deleted file mode 100644 index 557ea93818d6..000000000000 --- a/pubsub/google/cloud/pubsub/snapshot.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Define API Snapshots.""" - -from google.cloud.pubsub._helpers import topic_name_from_path - - -class Snapshot(object): - - _DELETED_TOPIC_PATH = '_deleted-topic_' - """Value of ``projects.snapshots.topic`` when topic has been deleted.""" - - def __init__(self, name, subscription=None, topic=None, client=None): - - num_kwargs = len( - [param for param in (subscription, topic, client) if param]) - if num_kwargs != 1: - raise TypeError( - "Pass only one of 'subscription', 'topic', 'client'.") - - self.name = name - self.topic = topic or getattr(subscription, 'topic', None) - self._subscription = subscription - self._client = client or getattr( - subscription, '_client', None) or topic._client - - @classmethod - def from_api_repr(cls, resource, client, topics=None): - """Factory: construct a subscription given its API representation - - :type resource: dict - :param resource: snapshot resource representation returned from the - API. - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration. - - :type subscriptions: dict - :param subscriptions: - (Optional) A Subscription to which this snapshot belongs. If not - passed, the subscription will have a newly-created subscription. - Must have the same topic as the snapshot. - - :rtype: :class:`google.cloud.pubsub.subscription.Subscription` - :returns: Subscription parsed from ``resource``. - """ - if topics is None: - topics = {} - topic_path = resource['topic'] - if topic_path == cls._DELETED_TOPIC_PATH: - topic = None - else: - topic = topics.get(topic_path) - if topic is None: - # NOTE: This duplicates behavior from Topic.from_api_repr to - # avoid an import cycle. 
- topic_name = topic_name_from_path(topic_path, client.project) - topic = topics[topic_path] = client.topic(topic_name) - _, _, _, name = resource['name'].split('/') - if topic is None: - return cls(name, client=client) - return cls(name, topic=topic) - - @property - def project(self): - """Project bound to the subscription.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in subscription APIs""" - return 'projects/%s/snapshots/%s' % (self.project, self.name) - - @property - def path(self): - """URL path for the subscription's APIs""" - return '/%s' % (self.full_name,) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic of the - current subscription. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. 
- """ - if not self._subscription: - raise RuntimeError( - 'Cannot create a snapshot not bound to a subscription') - - client = self._require_client(client) - api = client.subscriber_api - api.snapshot_create(self.full_name, self._subscription.full_name) - - def delete(self, client=None): - """API call: delete the snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.snapshot_delete(self.full_name) diff --git a/pubsub/google/cloud/pubsub/subscription.py b/pubsub/google/cloud/pubsub/subscription.py deleted file mode 100644 index 22f93246924c..000000000000 --- a/pubsub/google/cloud/pubsub/subscription.py +++ /dev/null @@ -1,590 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Define API Subscriptions.""" - -import datetime - -from google.cloud.exceptions import NotFound -from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub._helpers import topic_name_from_path -from google.cloud.pubsub.iam import Policy -from google.cloud.pubsub.message import Message - - -class Subscription(object): - """Subscriptions receive messages published to their topics. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions - - :type name: str - :param name: the name of the subscription. - - :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: - (Optional) the topic to which the subscription belongs; if ``None``, - the subscription's topic has been deleted. - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. If - not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: - (Optional) The client to use. If not passed, falls back to the - ``client`` stored on the topic. - """ - - _DELETED_TOPIC_PATH = '_deleted-topic_' - """Value of ``projects.subscriptions.topic`` when topic has been deleted. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions#Subscription.FIELDS.topic - """ - - def __init__(self, name, topic=None, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, message_retention_duration=None, - client=None): - - if client is None and topic is None: - raise TypeError("Pass only one of 'topic' or 'client'.") - - if client is not None and topic is not None: - raise TypeError("Pass only one of 'topic' or 'client'.") - - self.name = name - self.topic = topic - self._client = client or topic._client - self.ack_deadline = ack_deadline - self.push_endpoint = push_endpoint - self.retain_acked_messages = retain_acked_messages - self.message_retention_duration = message_retention_duration - - @classmethod - def from_api_repr(cls, resource, client, topics=None): - """Factory: construct a topic given its API representation - - :type resource: dict - :param resource: topic resource representation returned from the API. - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration for a topic. - - :type topics: dict - :param topics: - (Optional) A mapping of topic names -> topics. If not passed, the - subscription will have a newly-created topic. - - :rtype: :class:`google.cloud.pubsub.subscription.Subscription` - :returns: Subscription parsed from ``resource``. - """ - if topics is None: - topics = {} - topic_path = resource['topic'] - if topic_path == cls._DELETED_TOPIC_PATH: - topic = None - else: - topic = topics.get(topic_path) - if topic is None: - # NOTE: This duplicates behavior from Topic.from_api_repr to - # avoid an import cycle. 
- topic_name = topic_name_from_path(topic_path, client.project) - topic = topics[topic_path] = client.topic(topic_name) - _, _, _, name = resource['name'].split('/') - ack_deadline = resource.get('ackDeadlineSeconds') - push_config = resource.get('pushConfig', {}) - push_endpoint = push_config.get('pushEndpoint') - retain_acked_messages = resource.get('retainAckedMessages') - resource_duration = resource.get('duration', {}) - message_retention_duration = datetime.timedelta( - seconds=resource_duration.get('seconds', 0), - microseconds=resource_duration.get('nanos', 0) / 1000) - if topic is None: - return cls(name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, - client=client) - return cls(name, topic=topic, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - - @property - def project(self): - """Project bound to the subscription.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in subscription APIs""" - return 'projects/%s/subscriptions/%s' % (self.project, self.name) - - @property - def path(self): - """URL path for the subscription's APIs""" - return '/%s' % (self.full_name,) - - def auto_ack(self, return_immediately=False, max_messages=1, client=None): - """:class:`AutoAck` factory - - :type return_immediately: bool - :param return_immediately: passed through to :meth:`Subscription.pull` - - :type max_messages: int - :param max_messages: passed through to :meth:`Subscription.pull` - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: passed through to :meth:`Subscription.pull` and - :meth:`Subscription.acknowledge`. 
- - :rtype: :class:`AutoAck` - :returns: the instance created for the given ``ack_id`` and ``message`` - """ - return AutoAck(self, return_immediately, max_messages, client) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic of the - current subscription. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the subscription via a PUT request - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_create] - :end-before: [END subscription_create] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_create( - self.full_name, self.topic.full_name, - ack_deadline=self.ack_deadline, push_endpoint=self.push_endpoint, - retain_acked_messages=self.retain_acked_messages, - message_retention_duration=self.message_retention_duration) - - def exists(self, client=None): - """API call: test existence of the subscription via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_exists] - :end-before: [END subscription_exists] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: bool - :returns: Boolean indicating existence of the subscription. - """ - client = self._require_client(client) - api = client.subscriber_api - try: - api.subscription_get(self.full_name) - except NotFound: - return False - else: - return True - - def reload(self, client=None): - """API call: sync local subscription configuration via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :attr:`ack_deadline` and :attr:`push_endpoint` might never have - been set locally, or might have been updated by another client. This - method fetches their values from the server. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_reload] - :end-before: [END subscription_reload] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - data = api.subscription_get(self.full_name) - self.ack_deadline = data.get('ackDeadlineSeconds') - push_config = data.get('pushConfig', {}) - self.push_endpoint = push_config.get('pushEndpoint') - if self.topic is None and 'topic' in data: - topic_name = topic_name_from_path(data['topic'], client.project) - self.topic = client.topic(topic_name) - - def delete(self, client=None): - """API call: delete the subscription via a DELETE request. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_delete] - :end-before: [END subscription_delete] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_delete(self.full_name) - - def modify_push_configuration(self, push_endpoint, client=None): - """API call: update the push endpoint for the subscription. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_push_pull] - :end-before: [END subscription_push_pull] - - .. literalinclude:: snippets.py - :start-after: [START subscription_pull_push] - :end-before: [END subscription_pull_push] - - :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the - back-end. If None, the application must pull - messages. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_modify_push_config(self.full_name, push_endpoint) - self.push_endpoint = push_endpoint - - def pull(self, return_immediately=False, max_messages=1, client=None): - """API call: retrieve messages for the subscription. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_pull] - :end-before: [END subscription_pull] - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. 
- - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: list of (ack_id, message) tuples - :returns: sequence of tuples: ``ack_id`` is the ID to be used in a - subsequent call to :meth:`acknowledge`, and ``message`` - is an instance of - :class:`~google.cloud.pubsub.message.Message`. - """ - client = self._require_client(client) - api = client.subscriber_api - response = api.subscription_pull( - self.full_name, return_immediately, max_messages) - return [(info['ackId'], Message.from_api_repr(info['message'])) - for info in response] - - def acknowledge(self, ack_ids, client=None): - """API call: acknowledge retrieved messages for the subscription. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/acknowledge - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_acknowledge] - :end-before: [END subscription_acknowledge] - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_acknowledge(self.full_name, ack_ids) - - def modify_ack_deadline(self, ack_ids, ack_deadline, client=None): - """API call: update acknowledgement deadline for a retrieved message. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being updated - - :type ack_deadline: int - :param ack_deadline: new deadline for the message, in seconds - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_modify_ack_deadline( - self.full_name, ack_ids, ack_deadline) - - def snapshot(self, name, client=None): - """Creates a snapshot of this subscription. - - :type name: str - :param name: the name of the subscription - - :rtype: :class:`Snapshot` - :returns: The snapshot created with the passed in arguments. - """ - return Snapshot(name, subscription=self) - - def seek_snapshot(self, snapshot, client=None): - """API call: seek a subscription to a given snapshot - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type snapshot: :class:`Snapshot` - :param snapshot: The snapshot to seek to. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_seek(self.full_name, snapshot=snapshot.full_name) - - def seek_timestamp(self, timestamp, client=None): - """API call: seek a subscription to a given point in time - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type time: :class:`datetime.datetime` - :param time: The time to seek to. - """ - client = self._require_client(client) - timestamp = _datetime_to_rfc3339(timestamp) - api = client.subscriber_api - api.subscription_seek(self.full_name, time=timestamp) - - def get_iam_policy(self, client=None): - """Fetch the IAM policy for the subscription. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_get_iam_policy] - :end-before: [END subscription_get_iam_policy] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: policy created from the resource returned by the - ``getIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resp = api.get_iam_policy(self.full_name) - return Policy.from_api_repr(resp) - - def set_iam_policy(self, policy, client=None): - """Update the IAM policy for the subscription. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_set_iam_policy] - :end-before: [END subscription_set_iam_policy] - - :type policy: :class:`google.cloud.pubsub.iam.Policy` - :param policy: the new policy, typically fetched via - :meth:`get_iam_policy` and updated in place. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: updated policy created from the resource returned by the - ``setIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resource = policy.to_api_repr() - resp = api.set_iam_policy(self.full_name, resource) - return Policy.from_api_repr(resp) - - def check_iam_permissions(self, permissions, client=None): - """Verify permissions allowed for the current user. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_check_iam_permissions] - :end-before: [END subscription_check_iam_permissions] - - :type permissions: list of string - :param permissions: list of permissions to be tested - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: sequence of string - :returns: subset of ``permissions`` allowed by current IAM policy. - """ - client = self._require_client(client) - api = client.iam_policy_api - return api.test_iam_permissions( - self.full_name, list(permissions)) - - -class AutoAck(dict): - """Wrapper for :meth:`Subscription.pull` results. - - Mapping, tracks messages still-to-be-acknowledged. - - When used as a context manager, acknowledges all messages still in the - mapping on `__exit__`. When processing the pulled messages, application - code MUST delete messages from the :class:`AutoAck` mapping which are not - successfully processed, e.g.: - - .. code-block: python - - with AutoAck(subscription) as ack: # calls ``subscription.pull`` - for ack_id, message in ack.items(): - try: - do_something_with(message): - except: - del ack[ack_id] - - :type subscription: :class:`Subscription` - :param subscription: subscription to be pulled. - - :type return_immediately: bool - :param return_immediately: passed through to :meth:`Subscription.pull` - - :type max_messages: int - :param max_messages: passed through to :meth:`Subscription.pull` - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: passed through to :meth:`Subscription.pull` and - :meth:`Subscription.acknowledge`. 
- """ - def __init__(self, subscription, - return_immediately=False, max_messages=1, client=None): - super(AutoAck, self).__init__() - self._subscription = subscription - self._return_immediately = return_immediately - self._max_messages = max_messages - self._client = client - - def __enter__(self): - items = self._subscription.pull( - self._return_immediately, self._max_messages, self._client) - self.update(items) - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if self: - self._subscription.acknowledge(list(self), self._client) diff --git a/pubsub/google/cloud/pubsub/topic.py b/pubsub/google/cloud/pubsub/topic.py deleted file mode 100644 index f9a8c28a3a09..000000000000 --- a/pubsub/google/cloud/pubsub/topic.py +++ /dev/null @@ -1,551 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Topics.""" - -import base64 -import json -import time - -from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud._helpers import _NOW -from google.cloud._helpers import _to_bytes -from google.cloud.exceptions import NotFound -from google.cloud.pubsub._helpers import topic_name_from_path -from google.cloud.pubsub.iam import Policy -from google.cloud.pubsub.subscription import Subscription - - -class Topic(object): - """Topics are targets to which messages can be published. - - Subscribers then receive those messages. 
- - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics - - :type name: str - :param name: the name of the topic - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: A client which holds credentials and project configuration - for the topic (which requires a project). - - :type timestamp_messages: bool - :param timestamp_messages: If true, the topic will add a ``timestamp`` key - to the attributes of each published message: - the value will be an RFC 3339 timestamp. - """ - def __init__(self, name, client, timestamp_messages=False): - self.name = name - self._client = client - self.timestamp_messages = timestamp_messages - - def subscription(self, name, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """Creates a subscription bound to the current topic. - - Example: pull-mode subcription, default parameter values - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_defaults] - :end-before: [END topic_subscription_defaults] - - Example: pull-mode subcription, override ``ack_deadline`` default - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_ack90] - :end-before: [END topic_subscription_ack90] - - Example: push-mode subcription - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_push] - :end-before: [END topic_subscription_push] - - :type name: str - :param name: the name of the subscription - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the - back-end. If not set, the application must pull - messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. 
If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: :class:`Subscription` - :returns: The subscription created with the passed in arguments. - """ - return Subscription( - name, self, ack_deadline=ack_deadline, push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - - @classmethod - def from_api_repr(cls, resource, client): - """Factory: construct a topic given its API representation - - :type resource: dict - :param resource: topic resource representation returned from the API - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration for the topic. - - :rtype: :class:`google.cloud.pubsub.topic.Topic` - :returns: Topic parsed from ``resource``. - :raises: :class:`ValueError` if ``client`` is not ``None`` and the - project from the resource does not agree with the project - from the client. - """ - topic_name = topic_name_from_path(resource['name'], client.project) - return cls(topic_name, client=client) - - @property - def project(self): - """Project bound to the topic.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in topic / subscription APIs""" - return 'projects/%s/topics/%s' % (self.project, self.name) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the topic via a PUT request - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_create] - :end-before: [END topic_create] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - api.topic_create(topic_path=self.full_name) - - def exists(self, client=None): - """API call: test for the existence of the topic via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_exists] - :end-before: [END topic_exists] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: bool - :returns: Boolean indicating existence of the topic. - """ - client = self._require_client(client) - api = client.publisher_api - - try: - api.topic_get(topic_path=self.full_name) - except NotFound: - return False - else: - return True - - def delete(self, client=None): - """API call: delete the topic via a DELETE request - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - Example: - - .. 
literalinclude:: snippets.py - :start-after: [START topic_delete] - :end-before: [END topic_delete] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - api.topic_delete(topic_path=self.full_name) - - def _timestamp_message(self, attrs): - """Add a timestamp to ``attrs``, if the topic is so configured. - - If ``attrs`` already has the key, do nothing. - - Helper method for ``publish``/``Batch.publish``. - """ - if self.timestamp_messages and 'timestamp' not in attrs: - attrs['timestamp'] = _datetime_to_rfc3339(_NOW()) - - def publish(self, message, client=None, **attrs): - """API call: publish a message to a topic via a POST request - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - Example without message attributes: - - .. literalinclude:: snippets.py - :start-after: [START topic_publish_simple_message] - :end-before: [END topic_publish_simple_message] - - With message attributes: - - .. literalinclude:: snippets.py - :start-after: [START topic_publish_message_with_attrs] - :end-before: [END topic_publish_message_with_attrs] - - :type message: bytes - :param message: the message payload - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. 
- - :type attrs: dict (string -> string) - :param attrs: key-value pairs to send as message attributes - - :rtype: str - :returns: message ID assigned by the server to the published message - """ - client = self._require_client(client) - api = client.publisher_api - - self._timestamp_message(attrs) - message_data = {'data': message, 'attributes': attrs} - message_ids = api.topic_publish(self.full_name, [message_data]) - return message_ids[0] - - def batch(self, client=None, **kwargs): - """Return a batch to use as a context manager. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_batch] - :end-before: [END topic_batch] - - .. note:: - - The only API request happens during the ``__exit__()`` of the topic - used as a context manager, and only if the block exits without - raising an exception. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :type kwargs: dict - :param kwargs: Keyword arguments passed to the - :class:`~google.cloud.pubsub.topic.Batch` constructor. - - :rtype: :class:`Batch` - :returns: A batch to use as a context manager. - """ - client = self._require_client(client) - return Batch(self, client, **kwargs) - - def list_subscriptions(self, page_size=None, page_token=None, client=None): - """List subscriptions for the project associated with this client. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_list_subscriptions] - :end-before: [END topic_list_subscriptions] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. 
- - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: :class:`~google.cloud.iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - return api.topic_list_subscriptions(self, page_size, page_token) - - def get_iam_policy(self, client=None): - """Fetch the IAM policy for the topic. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_get_iam_policy] - :end-before: [END topic_get_iam_policy] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: policy created from the resource returned by the - ``getIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resp = api.get_iam_policy(self.full_name) - return Policy.from_api_repr(resp) - - def set_iam_policy(self, policy, client=None): - """Update the IAM policy for the topic. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_set_iam_policy] - :end-before: [END topic_set_iam_policy] - - :type policy: :class:`google.cloud.pubsub.iam.Policy` - :param policy: the new policy, typically fetched via - :meth:`get_iam_policy` and updated in place. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. 
- - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: updated policy created from the resource returned by the - ``setIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resource = policy.to_api_repr() - resp = api.set_iam_policy(self.full_name, resource) - return Policy.from_api_repr(resp) - - def check_iam_permissions(self, permissions, client=None): - """Verify permissions allowed for the current user. - - See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_check_iam_permissions] - :end-before: [END topic_check_iam_permissions] - - :type permissions: list of string - :param permissions: list of permissions to be tested - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: sequence of string - :returns: subset of ``permissions`` allowed by current IAM policy. - """ - client = self._require_client(client) - api = client.iam_policy_api - return api.test_iam_permissions( - self.full_name, list(permissions)) - - -class Batch(object): - """Context manager: collect messages to publish via a single API call. - - Helper returned by :meth:Topic.batch - - :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: the topic being published - - :param client: The client to use. - :type client: :class:`google.cloud.pubsub.client.Client` - - :param max_interval: The maximum interval, in seconds, before the batch - will automatically commit. Note that this does not - run a background loop; it just checks when each - message is published. Therefore, this is intended - for situations where messages are published at - reasonably regular intervals. Defaults to infinity - (off). 
- :type max_interval: float - - :param max_messages: The maximum number of messages to hold in the batch - before automatically commiting. Defaults to infinity - (off). - :type max_messages: float - - :param max_size: The maximum size that the serialized messages can be - before automatically commiting. Defaults to 9 MB - (slightly less than the API limit). - :type max_size: int - """ - _INFINITY = float('inf') - - def __init__(self, topic, client, max_interval=_INFINITY, - max_messages=_INFINITY, max_size=1024 * 1024 * 9): - self.topic = topic - self.client = client - self.messages = [] - self.message_ids = [] - - # Set the autocommit rules. If the interval or number of messages - # is exceeded, then the .publish() method will imply a commit. - self._max_interval = max_interval - self._max_messages = max_messages - self._max_size = max_size - - # Set up the initial state, initializing messages, the starting - # timestamp, etc. - self._reset_state() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.commit() - - def __iter__(self): - return iter(self.message_ids) - - def _reset_state(self): - """Reset the state of this batch.""" - - del self.messages[:] - self._start_timestamp = time.time() - self._current_size = 0 - - def publish(self, message, **attrs): - """Emulate publishing a message, but save it. - - :type message: bytes - :param message: the message payload - - :type attrs: dict (string -> string) - :param attrs: key-value pairs to send as message attributes - """ - self.topic._timestamp_message(attrs) - - # Append the message to the list of messages.. - item = {'attributes': attrs, 'data': message} - self.messages.append(item) - - # Determine the approximate size of the message, and increment - # the current batch size appropriately. 
- encoded = base64.b64encode(_to_bytes(message)) - encoded += base64.b64encode( - json.dumps(attrs, ensure_ascii=False).encode('utf8'), - ) - self._current_size += len(encoded) - - # If too much time has elapsed since the first message - # was added, autocommit. - now = time.time() - if now - self._start_timestamp > self._max_interval: - self.commit() - return - - # If the number of messages on the list is greater than the - # maximum allowed, autocommit (with the batch's client). - if len(self.messages) >= self._max_messages: - self.commit() - return - - # If we have reached the max size, autocommit. - if self._current_size >= self._max_size: - self.commit() - return - - def commit(self, client=None): - """Send saved messages as a single API call. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - """ - if not self.messages: - return - - if client is None: - client = self.client - api = client.publisher_api - message_ids = api.topic_publish(self.topic.full_name, self.messages[:]) - self.message_ids.extend(message_ids) - self._reset_state() From 7dd719f4c68943221b2d6f6c7e2fbc0bc3b9bbf8 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 09:58:05 -0700 Subject: [PATCH 15/86] Clean up a couple small things. --- .../google/cloud/pubsub_v1/publisher/batch.py | 68 ++++++++++--------- .../cloud/pubsub_v1/publisher/client.py | 2 +- 2 files changed, 36 insertions(+), 34 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py index 2e8744eb5289..f3618d6af164 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch.py @@ -36,22 +36,22 @@ class Batch(object): message to be published is received; subsequent messages are added to that batch until the process of actual publishing _starts_. 
- Once this occurs, any new messages sent to ``publish`` open a new batch. + Once this occurs, any new messages sent to :meth:`publish` open a new + batch. If you are using this library, you most likely do not need to instantiate batch objects directly; they will be created for you. If you want to change the actual batching settings, see the ``batching`` argument on - :class:`google.cloud.pubsub_v1.PublisherClient`. + :class:`~.pubsub_v1.PublisherClient`. Args: - client (:class:`google.cloud.pubsub_v1.PublisherClient`): The - publisher client used to create this batch. Batch settings are - inferred from this. + client (~.pubsub_v1.PublisherClient): The publisher client used to + create this batch. topic (str): The topic. The format for this is ``projects/{project}/topics/{topic}``. - settings (:class:`google.cloud.pubsub_v1.types.Batching`): The - settings for batch publishing. These should be considered - immutable once the batch has been opened. + settings (~.pubsub_v1.types.Batching): The settings for batch + publishing. These should be considered immutable once the batch + has been opened. autocommit (bool): Whether to autocommit the batch when the time has elapsed. Defaults to True unless ``settings.max_latency`` is inf. @@ -62,20 +62,20 @@ def __init__(self, client, topic, settings, autocommit=True): # Create a namespace that is owned by the client manager; this # is necessary to be able to have these values be communicable between # processes. 
- self._ = self.manager.Namespace() - self._.futures = self.manager.list() - self._.messages = self.manager.list() - self._.message_ids = self.manager.dict() - self._.settings = settings - self._.status = 'accepting messages' - self._.topic = topic + self._shared = self.manager.Namespace() + self._shared.futures = self.manager.list() + self._shared.messages = self.manager.list() + self._shared.message_ids = self.manager.dict() + self._shared.settings = settings + self._shared.status = 'accepting messages' + self._shared.topic = topic # This is purely internal tracking. self._process = None # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. - if autocommit and self._.settings.max_latency < float('inf'): + if autocommit and self._shared.settings.max_latency < float('inf'): self._process = self._client.thread_class(target=self.monitor) self._process.start() @@ -84,10 +84,9 @@ def client(self): """Return the client that created this batch. Returns: - :class:~`pubsub_v1.client.Client`: The client that created this - batch. + ~.pubsub_v1.client.Client: The client that created this batch. """ - return self._client + return self._sharedclient @property def manager(self): @@ -107,7 +106,7 @@ def status(self): str: The status of this batch. All statuses are human-readable, all-lowercase strings. """ - return self._.status + return self._shared.status def commit(self): """Actually publish all of the messages on the active batch. @@ -117,12 +116,15 @@ def commit(self): completion. """ # Update the status. - self._.status = 'in-flight' + self._shared.status = 'in-flight' # Begin the request to publish these messages. 
- if len(self._.messages) == 0: + if len(self._shared.messages) == 0: raise Exception('Empty queue') - response = self._client.api.publish(self._.topic, self._.messages) + response = self._client.api.publish( + self._shared.topic, + self._shared.messages, + ) # FIXME (lukesneeringer): Check for failures; retry. @@ -131,7 +133,7 @@ def commit(self): # Sanity check: If the number of message IDs is not equal to the # number of futures I have, then something went wrong. - if len(response.message_ids) != len(self._.futures): + if len(response.message_ids) != len(self._shared.futures): raise exceptions.PublishError( 'Some messages were not successfully published.', ) @@ -139,9 +141,9 @@ def commit(self): # Iterate over the futures on the queue and return the response IDs. # We are trusting that there is a 1:1 mapping, and raise an exception # if not. - self._.status = 'success' - for message_id, fut in zip(response.message_ids, self._.futures): - self._.message_ids[hash(fut)] = message_id + self._shared.status = 'success' + for message_id, fut in zip(response.message_ids, self._shared.futures): + self._shared.message_ids[hash(fut)] = message_id fut._trigger() def monitor(self): @@ -154,11 +156,11 @@ def monitor(self): # in a separate thread. # # Sleep for however long we should be waiting. - time.sleep(self._.settings.max_latency) + time.sleep(self._shared.settings.max_latency) # If, in the intervening period, the batch started to be committed, # then no-op at this point. - if self._.status != 'accepting messages': + if self._shared.status != 'accepting messages': return # Commit. @@ -194,8 +196,8 @@ def publish(self, data, **attrs): ``attrs`` are not either a ``str`` or ``bytes``. Returns: - Future: An object conforming to the ``concurrent.futures.Future`` - interface. + ~.pubsub_v1.publisher.future.Future: An object conforming to the + :class:`concurrent.futures.Future` interface. """ # Sanity check: Is the data being sent as a bytestring? 
# If it is literally anything else, complain loudly about it. @@ -214,14 +216,14 @@ def publish(self, data, **attrs): 'be sent as text strings.') # Store the actual message in the batch's message queue. - self._.messages.append( + self._shared.messages.append( types.PubsubMessage(data=data, attributes=attrs), ) # Return a Future. That future needs to be aware of the status # of this batch. f = future.Future(self._) - self._.futures.append(f) + self._shared.futures.append(f) return f diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index d3a1ecb6264e..4e10630067b5 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -148,7 +148,7 @@ def publish(self, topic, data, **attrs): >>> response = client.publish(topic, data, username='guido') Args: - topic (:class:~`pubsub_v1.types.Topic`): The topic to publish + topic (~.pubsub_v1.types.Topic): The topic to publish messages to. data (bytes): A bytestring representing the message body. This must be a bytestring (a text string will raise TypeError). From c1042ac21bbda2b37f084106f83d2ad6dd04ac01 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 10:01:40 -0700 Subject: [PATCH 16/86] A couple more small fixes. 
--- pubsub/google/cloud/pubsub_v1/__init__.py | 2 ++ pubsub/google/cloud/pubsub_v1/publisher/future.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/__init__.py b/pubsub/google/cloud/pubsub_v1/__init__.py index 55454e211015..7e785dc9dc7a 100644 --- a/pubsub/google/cloud/pubsub_v1/__init__.py +++ b/pubsub/google/cloud/pubsub_v1/__init__.py @@ -14,10 +14,12 @@ from __future__ import absolute_import +from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import PublisherClient from google.cloud.pubsub_v1.subscriber import SubscriberClient __all__ = ( 'PublisherClient', 'SubscriberClient', + 'types', ) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py index 486e94700283..606bedc31a84 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/future.py @@ -15,8 +15,8 @@ from __future__ import absolute_import import queue -import uuid import time +import uuid class Future(object): From ccaa865d62a71b21fefd279688b55b95bee326ae Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 10:05:37 -0700 Subject: [PATCH 17/86] WIP --- pubsub/google/cloud/pubsub_v1/publisher/client.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 4e10630067b5..58d7b402725b 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -47,18 +47,16 @@ class PublisherClient(object): The default is :class:`multiprocessing.Process` kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying - :class:`~gapic.pubsub.v1.publisher_client.PublisherClient`. + :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. 
Generally, you should not need to set additional keyword arguments. """ - _gapic_class = publisher_client.PublisherClient - def __init__(self, batching=(), thread_class=multiprocessing.Process, queue_class=multiprocessing.Queue, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. kwargs['lib_name'] = 'gccl' kwargs['lib_version'] = __VERSION__ - self.api = self._gapic_class(**kwargs) + self.api = publisher_client.PublisherClient(**kwargs) self.batching = types.Batching(*batching) # Set the manager, which is responsible for granting shared memory @@ -160,7 +158,7 @@ def publish(self, topic, data, **attrs): if the ``attrs`` are not either a ``str`` or ``bytes``. Returns: - :class:~`pubsub_v1.publisher.futures.Future`: An object conforming + :class:`~.pubsub_v1.publisher.futures.Future`: An object conforming to the ``concurrent.futures.Future`` interface. """ return self.batch(topic).publish(data, *attrs) From f2ee4d4aaeefe91bc59fd7d241a9f3f2401d7b24 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 13:28:18 -0700 Subject: [PATCH 18/86] Rework based on @jonparrott concurrency ideas. 
--- pubsub/google/cloud/pubsub_v1/_gapic.py | 8 +- .../google/cloud/pubsub_v1/publisher/batch.py | 233 ------------------ .../cloud/pubsub_v1/publisher/client.py | 79 +++--- .../cloud/pubsub_v1/publisher/future.py | 150 ----------- pubsub/google/cloud/pubsub_v1/types.py | 6 +- 5 files changed, 47 insertions(+), 429 deletions(-) delete mode 100644 pubsub/google/cloud/pubsub_v1/publisher/batch.py delete mode 100644 pubsub/google/cloud/pubsub_v1/publisher/future.py diff --git a/pubsub/google/cloud/pubsub_v1/_gapic.py b/pubsub/google/cloud/pubsub_v1/_gapic.py index 145682860215..79aac7de8941 100644 --- a/pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/pubsub/google/cloud/pubsub_v1/_gapic.py @@ -17,7 +17,7 @@ import functools -def add_methods(SourceClass, blacklist=()): +def add_methods(source_class, blacklist=()): """Add wrapped versions of the `api` member's methods to the class. Any methods passed in `blacklist` are not added. @@ -47,7 +47,7 @@ def wrap(wrapped_fx): def actual_decorator(cls): # Reflectively iterate over most of the methods on the source class # (the GAPIC) and make wrapped versions available on this client. - for name in dir(SourceClass): + for name in dir(source_class): # Ignore all private and magic methods. if name.startswith('_'): continue @@ -57,12 +57,12 @@ def actual_decorator(cls): continue # Retrieve the attribute, and ignore it if it is not callable. - attr = getattr(cls._gapic_class, name) + attr = getattr(source_class, name) if not callable(attr): continue # Add a wrapper method to this object. - fx = wrap(getattr(cls._gapic_class, name)) + fx = wrap(getattr(source_class, name)) setattr(cls, name, fx) # Return the augmented class. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch.py b/pubsub/google/cloud/pubsub_v1/publisher/batch.py deleted file mode 100644 index f3618d6af164..000000000000 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch.py +++ /dev/null @@ -1,233 +0,0 @@ -# Copyright 2017, Google Inc. All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import collections -import copy -import queue -import time - -import six - -from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.publisher import exceptions -from google.cloud.pubsub_v1.publisher import future - - -class Batch(object): - """A batch of messages. - - The batch is the internal group of messages which are either awaiting - publication or currently in-flight. - - A batch is automatically created by the PublisherClient when the first - message to be published is received; subsequent messages are added to - that batch until the process of actual publishing _starts_. - - Once this occurs, any new messages sent to :meth:`publish` open a new - batch. - - If you are using this library, you most likely do not need to instantiate - batch objects directly; they will be created for you. If you want to - change the actual batching settings, see the ``batching`` argument on - :class:`~.pubsub_v1.PublisherClient`. - - Args: - client (~.pubsub_v1.PublisherClient): The publisher client used to - create this batch. - topic (str): The topic. The format for this is - ``projects/{project}/topics/{topic}``. - settings (~.pubsub_v1.types.Batching): The settings for batch - publishing. These should be considered immutable once the batch - has been opened. - autocommit (bool): Whether to autocommit the batch when the time - has elapsed. 
Defaults to True unless ``settings.max_latency`` is - inf. - """ - def __init__(self, client, topic, settings, autocommit=True): - self._client = client - - # Create a namespace that is owned by the client manager; this - # is necessary to be able to have these values be communicable between - # processes. - self._shared = self.manager.Namespace() - self._shared.futures = self.manager.list() - self._shared.messages = self.manager.list() - self._shared.message_ids = self.manager.dict() - self._shared.settings = settings - self._shared.status = 'accepting messages' - self._shared.topic = topic - - # This is purely internal tracking. - self._process = None - - # Continually monitor the thread until it is time to commit the - # batch, or the batch is explicitly committed. - if autocommit and self._shared.settings.max_latency < float('inf'): - self._process = self._client.thread_class(target=self.monitor) - self._process.start() - - @property - def client(self): - """Return the client that created this batch. - - Returns: - ~.pubsub_v1.client.Client: The client that created this batch. - """ - return self._sharedclient - - @property - def manager(self): - """Return the client's manager. - - Returns: - :class:`multiprocessing.Manager`: The manager responsible for - handling shared memory objects. - """ - return self._client.manager - - @property - def status(self): - """Return the status of this batch. - - Returns: - str: The status of this batch. All statuses are human-readable, - all-lowercase strings. - """ - return self._shared.status - - def commit(self): - """Actually publish all of the messages on the active batch. - - This moves the batch out from being the active batch to an in-flight - batch on the publisher, and then the batch is discarded upon - completion. - """ - # Update the status. - self._shared.status = 'in-flight' - - # Begin the request to publish these messages. 
- if len(self._shared.messages) == 0: - raise Exception('Empty queue') - response = self._client.api.publish( - self._shared.topic, - self._shared.messages, - ) - - # FIXME (lukesneeringer): Check for failures; retry. - - # We got a response from Pub/Sub; denote that we are processing. - self._status = 'processing results' - - # Sanity check: If the number of message IDs is not equal to the - # number of futures I have, then something went wrong. - if len(response.message_ids) != len(self._shared.futures): - raise exceptions.PublishError( - 'Some messages were not successfully published.', - ) - - # Iterate over the futures on the queue and return the response IDs. - # We are trusting that there is a 1:1 mapping, and raise an exception - # if not. - self._shared.status = 'success' - for message_id, fut in zip(response.message_ids, self._shared.futures): - self._shared.message_ids[hash(fut)] = message_id - fut._trigger() - - def monitor(self): - """Commit this batch after sufficient time has elapsed. - - This simply sleeps for ``self._settings.max_latency`` seconds, - and then calls commit unless the batch has already been committed. - """ - # Note: This thread blocks; it is up to the calling code to call it - # in a separate thread. - # - # Sleep for however long we should be waiting. - time.sleep(self._shared.settings.max_latency) - - # If, in the intervening period, the batch started to be committed, - # then no-op at this point. - if self._shared.status != 'accepting messages': - return - - # Commit. - return self.commit() - - def publish(self, data, **attrs): - """Publish a single message. - - .. note:: - Messages in Pub/Sub are blobs of bytes. They are *binary* data, - not text. You must send data as a bytestring - (``bytes`` in Python 3; ``str`` in Python 2), and this library - will raise an exception if you send a text string. 
- - The reason that this is so important (and why we do not try to - coerce for you) is because Pub/Sub is also platform independent - and there is no way to know how to decode messages properly on - the other side; therefore, encoding and decoding is a required - exercise for the developer. - - Add the given message to this object; this will cause it to be - published once the batch either has enough messages or a sufficient - period of time has elapsed. - - Args: - data (bytes): A bytestring representing the message body. This - must be a bytestring (a text string will raise TypeError). - attrs (Mapping[str, str]): A dictionary of attributes to be - sent as metadata. (These may be text strings or byte strings.) - - Raises: - TypeError: If the ``data`` sent is not a bytestring, or if the - ``attrs`` are not either a ``str`` or ``bytes``. - - Returns: - ~.pubsub_v1.publisher.future.Future: An object conforming to the - :class:`concurrent.futures.Future` interface. - """ - # Sanity check: Is the data being sent as a bytestring? - # If it is literally anything else, complain loudly about it. - if not isinstance(data, six.binary_type): - raise TypeError('Data being published to Pub/Sub must be sent ' - 'as a bytestring.') - - # Coerce all attributes to text strings. - for k, v in copy.copy(attrs).items(): - if isinstance(data, six.text_type): - continue - if isinstance(data, six.binary_type): - attrs[k] = v.decode('utf-8') - continue - raise TypeError('All attributes being published to Pub/Sub must ' - 'be sent as text strings.') - - # Store the actual message in the batch's message queue. - self._shared.messages.append( - types.PubsubMessage(data=data, attributes=attrs), - ) - - # Return a Future. That future needs to be aware of the status - # of this batch. - f = future.Future(self._) - self._shared.futures.append(f) - return f - - -# Make a fake batch. This is used by the client to do single-op checks -# for batch existence. 
-FakeBatch = collections.namedtuple('FakeBatch', ['status']) -FAKE = FakeBatch(status='fake') diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 58d7b402725b..84848007ffcc 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -24,8 +24,8 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.publisher.batch import Batch -from google.cloud.pubsub_v1.publisher.batch import FAKE +from google.cloud.pubsub_v1.publisher.batch import base +from google.cloud.pubsub_v1.publisher.batch import mp __VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -40,54 +40,41 @@ class PublisherClient(object): get sensible defaults. Args: - batching (:class:`google.cloud.pubsub_v1.types.Batching`): The - settings for batch publishing. - thread_class (class): Any class that is duck-type compatible with - :class:`threading.Thread`. - The default is :class:`multiprocessing.Process` + batch_settings (~.pubsub_v1.types.BatchSettings): The settings + for batch publishing. + batch_class (class): A class that describes how to handle + batches. You may subclass the + :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in + order to define your own batcher. This is primarily provided to + allow use of different concurrency models; the default + is based on :class:`multiprocessing.Process`. kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. Generally, you should not need to set additional keyword arguments. 
""" - def __init__(self, batching=(), thread_class=multiprocessing.Process, - queue_class=multiprocessing.Queue, **kwargs): + def __init__(self, batch_settings=(), batch_class=mp.Batch, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. kwargs['lib_name'] = 'gccl' kwargs['lib_version'] = __VERSION__ self.api = publisher_client.PublisherClient(**kwargs) - self.batching = types.Batching(*batching) - - # Set the manager, which is responsible for granting shared memory - # objects. - self._manager = multiprocessing.Manager() - - # Set the thread class. - self._thread_class = thread_class + self.batch_settings = types.BatchSettings(*batch_settings) # The batches on the publisher client are responsible for holding # messages. One batch exists for each topic. + self._batch_class = batch_class self._batches = {} @property - def manager(self): - """Return the manager. - - Returns: - :class:`multiprocessing.Manager`: The manager responsible for - handling shared memory objects. - """ - return self._manager - - @property - def thread_class(self): - """Return the thread class provided at instantiation. + def concurrency(self): + """Return the concurrency strategy instance. Returns: - class: A class duck-type compatible with :class:`threading.Thread`. + ~.pubsub_v1.concurrency.base.PublishStrategy: The class responsible + for handling publishing concurrency. """ - return self._thread_class + return self._concurrency def batch(self, topic, create=True, autocommit=True): """Return the current batch. @@ -106,13 +93,14 @@ def batch(self, topic, create=True, autocommit=True): """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. 
- if self._batches.get(topic, FAKE).status != 'accepting messages': + accepting = base.BaseBatch.Status.ACCEPTING_MESSAGES + if self._batches.get(topic, base.FAKE).status != accepting: if not create: return None - self._batches[topic] = Batch( + self._batches[topic] = self._batch_class( autocommit=autocommit, client=self, - settings=self.batching, + settings=self.batch_settings, topic=topic, ) @@ -149,16 +137,29 @@ def publish(self, topic, data, **attrs): topic (~.pubsub_v1.types.Topic): The topic to publish messages to. data (bytes): A bytestring representing the message body. This - must be a bytestring (a text string will raise TypeError). + must be a bytestring. attrs (Mapping[str, str]): A dictionary of attributes to be sent as metadata. (These may be text strings or byte strings.) - Raises: - :exc:`TypeError`: If the ``data`` sent is not a bytestring, or - if the ``attrs`` are not either a ``str`` or ``bytes``. - Returns: :class:`~.pubsub_v1.publisher.futures.Future`: An object conforming to the ``concurrent.futures.Future`` interface. """ + # Sanity check: Is the data being sent as a bytestring? + # If it is literally anything else, complain loudly about it. + if not isinstance(data, six.binary_type): + raise TypeError('Data being published to Pub/Sub must be sent ' + 'as a bytestring.') + + # Coerce all attributes to text strings. + for k, v in copy.copy(attrs).items(): + if isinstance(data, six.text_type): + continue + if isinstance(data, six.binary_type): + attrs[k] = v.decode('utf-8') + continue + raise TypeError('All attributes being published to Pub/Sub must ' + 'be sent as text strings.') + + # Delegate the publishing to the batch. 
return self.batch(topic).publish(data, *attrs) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/future.py b/pubsub/google/cloud/pubsub_v1/publisher/future.py deleted file mode 100644 index 606bedc31a84..000000000000 --- a/pubsub/google/cloud/pubsub_v1/publisher/future.py +++ /dev/null @@ -1,150 +0,0 @@ -# Copyright 2017, Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import queue -import time -import uuid - - -class Future(object): - """Encapsulation of the asynchronous execution of an action. - - This object is returned from asychronous Pub/Sub calls, and is the - interface to determine the status of those calls. - - This object should not be created directly, but is returned by other - methods in this library. - - Args: - batch (:class:`multiprocessing.Namespace`): Information about the - batch object that is committing this message. - """ - def __init__(self, batch_info): - self._batch_info = batch_info - self._hash = hash(uuid.uuid4()) - self._callbacks = [] - - def __hash__(self): - return self._hash - - def cancel(self): - """Publishes in Pub/Sub currently may not be canceled. - - This method always returns False. - """ - return False - - def cancelled(self): - """Publishes in Pub/Sub currently may not be canceled. - - This method always returns False. - """ - return False - - def running(self): - """Publishes in Pub/Sub currently may not be canceled. 
- - This method always returns True. - """ - return True - - def done(self): - """Return True if the publish has completed, False otherwise. - - This still returns True in failure cases; checking `result` or - `exception` is the canonical way to assess success or failure. - """ - return self._batch_info.status in ('success', 'error') - - def result(self, timeout=None): - """Return the message ID, or raise an exception. - - This blocks until the message has successfully been published, and - returns the message ID. - - Args: - timeout (int|float): The number of seconds before this call - times out and raises TimeoutError. - - Raises: - :class:~`pubsub_v1.TimeoutError`: If the request times out. - :class:~`Exception`: For undefined exceptions in the underlying - call execution. - """ - # Attempt to get the exception if there is one. - # If there is not one, then we know everything worked, and we can - # return an appropriate value. - err = self.exception(timeout=timeout) - if err is None: - return self._batch_info.message_ids[hash(self)] - raise err - - def exception(self, timeout=None, _wait=1): - """Return the exception raised by the call, if any. - - This blocks until the message has successfully been published, and - returns the exception. If the call succeeded, return None. - - Args: - timeout (int|float): The number of seconds before this call - times out and raises TimeoutError. - - Raises: - :exc:`TimeoutError`: If the request times out. - - Returns: - :class:`Exception`: The exception raised by the call, if any. - """ - # If the batch completed successfully, this should return None. - if self._batch_info.status == 'success': - return None - - # If this batch had an error, this should return it. - if self._batch_info.status == 'error': - return self._batch_info.error - - # If the timeout has been exceeded, raise TimeoutError. - if timeout and timeout < 0: - raise TimeoutError('Timed out waiting for an exception.') - - # Wait a little while and try again. 
- time.sleep(_wait) - return self.exception( - timeout=timeout - _wait, - _wait=min(_wait * 2, 60), - ) - - def add_done_callback(self, fn): - """Attach the provided callable to the future. - - The provided function is called, with this future as its only argument, - when the future finishes running. - """ - if self.done(): - fn(self) - self._callbacks.append(fn) - - def _trigger(self): - """Trigger all callbacks registered to this Future. - - This method is called internally by the batch once the batch - completes. - - Args: - message_id (str): The message ID, as a string. - """ - for callback in self._callbacks: - callback(self) diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py index b500d159a3a6..778391f1c21f 100644 --- a/pubsub/google/cloud/pubsub_v1/types.py +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -28,10 +28,10 @@ # This class is used when creating a publisher or subscriber client, and # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. -Batching = collections.namedtuple('Batching', +BatchSettings = collections.namedtuple('BatchSettings', ['max_bytes', 'max_latency', 'max_messages'], ) -Batching.__new__.__defaults__ = ( +BatchSettings.__new__.__defaults__ = ( 1024 * 1024 * 5, # max_bytes: 5 MB 0.25, # max_latency: 0.25 seconds 1000, # max_messages: 1,000 @@ -51,7 +51,7 @@ ) -names = ['Batching', 'FlowControl'] +names = ['BatchSettings', 'FlowControl'] for name, message in get_messages(pubsub_pb2).items(): setattr(sys.modules[__name__], name, message) names.append(name) From 12d5546036d55ef8a2576ec962631dd41241458e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 14:03:11 -0700 Subject: [PATCH 19/86] Refactor the batching implementation. 
--- .../pubsub_v1/publisher/batch/__init__.py | 0 .../cloud/pubsub_v1/publisher/batch/base.py | 107 ++++++ .../cloud/pubsub_v1/publisher/batch/mp.py | 360 ++++++++++++++++++ 3 files changed, 467 insertions(+) create mode 100644 pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py create mode 100644 pubsub/google/cloud/pubsub_v1/publisher/batch/base.py create mode 100644 pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py new file mode 100644 index 000000000000..d0e9e3885d2a --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -0,0 +1,107 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import abc +import collections + +import six + + +@six.add_metaclass(abc.ABCMeta) +class BaseBatch(object): + """The base batching class for Pub/Sub publishing. + + Although the :class:`~.pubsub_v1.publisher.batch.mp.Batch` class, based + on :class:`multiprocessing.Process`, is fine for most cases, advanced + users may need to implement something based on a different concurrency + model. 
+ + This class defines the interface for the Batch implementation; + subclasses may be passed as the ``batch_class`` argument to + :class:`~.pubsub_v1.client.PublisherClient`. + """ + @property + @abc.abstractmethod + def client(self): + """Return the client used to create this batch. + + Returns: + ~.pubsub_v1.client.PublisherClient: A publisher client. + """ + raise NotImplementedError + + @property + def status(self): + """Return the status of this batch. + + Returns: + str: The status of this batch. All statuses are human-readable, + all-lowercase strings, and represented in the + :class:`BaseBatch.Status` enum. + """ + raise NotImplementedError + + def publish(self, data, **attrs): + """Publish a single message. + + .. note:: + Messages in Pub/Sub are blobs of bytes. They are *binary* data, + not text. You must send data as a bytestring + (``bytes`` in Python 3; ``str`` in Python 2), and this library + will raise an exception if you send a text string. + + The reason that this is so important (and why we do not try to + coerce for you) is because Pub/Sub is also platform independent + and there is no way to know how to decode messages properly on + the other side; therefore, encoding and decoding is a required + exercise for the developer. + + Add the given message to this object; this will cause it to be + published once the batch either has enough messages or a sufficient + period of time has elapsed. + + Args: + data (bytes): A bytestring representing the message body. This + must be a bytestring (a text string will raise TypeError). + attrs (Mapping[str, str]): A dictionary of attributes to be + sent as metadata. (These may be text strings or byte strings.) + + Raises: + TypeError: If the ``data`` sent is not a bytestring, or if the + ``attrs`` are not either a ``str`` or ``bytes``. + + Returns: + ~.pubsub_v1.publisher.future.Future: An object conforming to the + :class:`concurrent.futures.Future` interface. 
+ """ + raise NotImplementedError + + class Status(object): + """An enum class representing valid statuses for a batch. + + It is acceptable for a class to use a status that is not on this + class; this represents the list of statuses where the existing + library hooks in functionality. + """ + ACCEPTING_MESSAGES = 'accepting messages' + ERROR = 'error' + SUCCESS = 'success' + + +# Make a fake batch. This is used by the client to do single-op checks +# for batch existence. +FakeBatch = collections.namedtuple('FakeBatch', ['status']) +FAKE = FakeBatch(status='fake') diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py new file mode 100644 index 000000000000..ac4a1cf926e7 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py @@ -0,0 +1,360 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import copy +import multiprocessing +import queue +import time +import uuid + +import six + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.batch import base + + +class Batch(base.BaseBatch): + """A batch of messages. + + The batch is the internal group of messages which are either awaiting + publication or currently in-flight. 
+ + A batch is automatically created by the PublisherClient when the first + message to be published is received; subsequent messages are added to + that batch until the process of actual publishing _starts_. + + Once this occurs, any new messages sent to :meth:`publish` open a new + batch. + + If you are using this library, you most likely do not need to instantiate + batch objects directly; they will be created for you. If you want to + change the actual batching settings, see the ``batching`` argument on + :class:`~.pubsub_v1.PublisherClient`. + + Args: + client (~.pubsub_v1.PublisherClient): The publisher client used to + create this batch. + topic (str): The topic. The format for this is + ``projects/{project}/topics/{topic}``. + settings (~.pubsub_v1.types.Batching): The settings for batch + publishing. These should be considered immutable once the batch + has been opened. + autocommit (bool): Whether to autocommit the batch when the time + has elapsed. Defaults to True unless ``settings.max_latency`` is + inf. + """ + def __init__(self, client, topic, settings, autocommit=True): + self._client = client + self._manager = multiprocessing.Manager() + + # Create a namespace that is owned by the client manager; this + # is necessary to be able to have these values be communicable between + # processes. + self._shared = self.manager.Namespace() + self._shared.futures = self.manager.list() + self._shared.messages = self.manager.list() + self._shared.message_ids = self.manager.dict() + self._shared.settings = settings + self._shared.status = self.Status.ACCEPTING_MESSAGES + self._shared.topic = topic + + # This is purely internal tracking. + self._process = None + + # Continually monitor the thread until it is time to commit the + # batch, or the batch is explicitly committed. 
+ if autocommit and self._shared.settings.max_latency < float('inf'): + self._process = multiprocessing.Process(target=self.monitor) + self._process.start() + + @property + def client(self): + """Return the client used to create this batch. + + Returns: + ~.pubsub_v1.client.PublisherClient: A publisher client. + """ + return self._client + + @property + def manager(self): + """Return the client's manager. + + Returns: + :class:`multiprocessing.Manager`: The manager responsible for + handling shared memory objects. + """ + return self._manager + + @property + def status(self): + """Return the status of this batch. + + Returns: + str: The status of this batch. All statuses are human-readable, + all-lowercase strings. + """ + return self._shared.status + + def commit(self): + """Actually publish all of the messages on the active batch. + + This moves the batch out from being the active batch to an in-flight + batch on the publisher, and then the batch is discarded upon + completion. + """ + # Update the status. + self._shared.status = 'in-flight' + + # Begin the request to publish these messages. + if len(self._shared.messages) == 0: + raise Exception('Empty queue') + response = self._client.api.publish( + self._shared.topic, + self._shared.messages, + ) + + # FIXME (lukesneeringer): Check for failures; retry. + + # We got a response from Pub/Sub; denote that we are processing. + self._status = 'processing results' + + # Sanity check: If the number of message IDs is not equal to the + # number of futures I have, then something went wrong. + if len(response.message_ids) != len(self._shared.futures): + raise exceptions.PublishError( + 'Some messages were not successfully published.', + ) + + # Iterate over the futures on the queue and return the response IDs. + # We are trusting that there is a 1:1 mapping, and raise an exception + # if not. 
+ self._shared.status = self.Status.SUCCESS + for message_id, fut in zip(response.message_ids, self._shared.futures): + self._shared.message_ids[hash(fut)] = message_id + fut._trigger() + + def monitor(self): + """Commit this batch after sufficient time has elapsed. + + This simply sleeps for ``self._settings.max_latency`` seconds, + and then calls commit unless the batch has already been committed. + """ + # Note: This thread blocks; it is up to the calling code to call it + # in a separate thread. + # + # Sleep for however long we should be waiting. + time.sleep(self._shared.settings.max_latency) + + # If, in the intervening period, the batch started to be committed, + # then no-op at this point. + if self._shared.status != self.Status.ACCEPTING_MESSAGES: + return + + # Commit. + return self.commit() + + def publish(self, data, **attrs): + """Publish a single message. + + .. note:: + Messages in Pub/Sub are blobs of bytes. They are *binary* data, + not text. You must send data as a bytestring + (``bytes`` in Python 3; ``str`` in Python 2), and this library + will raise an exception if you send a text string. + + The reason that this is so important (and why we do not try to + coerce for you) is because Pub/Sub is also platform independent + and there is no way to know how to decode messages properly on + the other side; therefore, encoding and decoding is a required + exercise for the developer. + + Add the given message to this object; this will cause it to be + published once the batch either has enough messages or a sufficient + period of time has elapsed. + + Args: + data (bytes): A bytestring representing the message body. This + must be a bytestring (a text string will raise TypeError). + attrs (Mapping[str, str]): A dictionary of attributes to be + sent as metadata. (These may be text strings or byte strings.) + + Raises: + TypeError: If the ``data`` sent is not a bytestring, or if the + ``attrs`` are not either a ``str`` or ``bytes``. 
+ + Returns: + ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the + :class:`concurrent.futures.Future` interface. + """ + # Sanity check: Is the data being sent as a bytestring? + # If it is literally anything else, complain loudly about it. + if not isinstance(data, six.binary_type): + raise TypeError('Data being published to Pub/Sub must be sent ' + 'as a bytestring.') + + # Coerce all attributes to text strings. + for k, v in copy.copy(attrs).items(): + if isinstance(data, six.text_type): + continue + if isinstance(data, six.binary_type): + attrs[k] = v.decode('utf-8') + continue + raise TypeError('All attributes being published to Pub/Sub must ' + 'be sent as text strings.') + + # Store the actual message in the batch's message queue. + self._shared.messages.append( + types.PubsubMessage(data=data, attributes=attrs), + ) + + # Return a Future. That future needs to be aware of the status + # of this batch. + f = Future(self._shared) + self._shared.futures.append(f) + return f + + +class Future(object): + """Encapsulation of the asynchronous execution of an action. + + This object is returned from asychronous Pub/Sub calls, and is the + interface to determine the status of those calls. + + This object should not be created directly, but is returned by other + methods in this library. + + Args: + batch (:class:`multiprocessing.Namespace`): Information about the + batch object that is committing this message. + """ + def __init__(self, batch_info): + self._batch_info = batch_info + self._callbacks = [] + self._hash = hash(uuid.uuid4()) + + def __hash__(self): + return self._hash + + def cancel(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns False. + """ + return False + + def cancelled(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns False. + """ + return False + + def running(self): + """Publishes in Pub/Sub currently may not be canceled. 
+ + This method always returns True. + """ + return True + + def done(self): + """Return True if the publish has completed, False otherwise. + + This still returns True in failure cases; checking `result` or + `exception` is the canonical way to assess success or failure. + """ + return self._batch_info.status in ('success', 'error') + + def result(self, timeout=None): + """Return the message ID, or raise an exception. + + This blocks until the message has successfully been published, and + returns the message ID. + + Args: + timeout (int|float): The number of seconds before this call + times out and raises TimeoutError. + + Raises: + :class:~`pubsub_v1.TimeoutError`: If the request times out. + :class:~`Exception`: For undefined exceptions in the underlying + call execution. + """ + # Attempt to get the exception if there is one. + # If there is not one, then we know everything worked, and we can + # return an appropriate value. + err = self.exception(timeout=timeout) + if err is None: + return self._batch_info.message_ids[hash(self)] + raise err + + def exception(self, timeout=None, _wait=1): + """Return the exception raised by the call, if any. + + This blocks until the message has successfully been published, and + returns the exception. If the call succeeded, return None. + + Args: + timeout (int|float): The number of seconds before this call + times out and raises TimeoutError. + + Raises: + :exc:`TimeoutError`: If the request times out. + + Returns: + :class:`Exception`: The exception raised by the call, if any. + """ + # If the batch completed successfully, this should return None. + if self._batch_info.status == 'success': + return None + + # If this batch had an error, this should return it. + if self._batch_info.status == 'error': + return self._batch_info.error + + # If the timeout has been exceeded, raise TimeoutError. + if timeout and timeout < 0: + raise TimeoutError('Timed out waiting for an exception.') + + # Wait a little while and try again. 
+ time.sleep(_wait) + return self.exception( + timeout=timeout - _wait, + _wait=min(_wait * 2, 60), + ) + + def add_done_callback(self, fn): + """Attach the provided callable to the future. + + The provided function is called, with this future as its only argument, + when the future finishes running. + """ + if self.done(): + fn(self) + self._callbacks.append(fn) + + def _trigger(self): + """Trigger all callbacks registered to this Future. + + This method is called internally by the batch once the batch + completes. + + Args: + message_id (str): The message ID, as a string. + """ + for callback in self._callbacks: + callback(self) From e99d959bf2eb910d4d880d177a7b23180ec57a58 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 14:15:06 -0700 Subject: [PATCH 20/86] Remove unrelated files. --- pubsub/google/cloud/pubsub_v1/retry.py | 35 --------------------- pubsub/google/cloud/pubsub_v1/subscriber.py | 34 -------------------- 2 files changed, 69 deletions(-) delete mode 100644 pubsub/google/cloud/pubsub_v1/retry.py delete mode 100644 pubsub/google/cloud/pubsub_v1/subscriber.py diff --git a/pubsub/google/cloud/pubsub_v1/retry.py b/pubsub/google/cloud/pubsub_v1/retry.py deleted file mode 100644 index 3c098faa1e37..000000000000 --- a/pubsub/google/cloud/pubsub_v1/retry.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright 2017, Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -def retry(func, delay=0, count=0, err=None, **kwargs): - """Attempt to retry a function after the provided delay. - - If there have been too many retries, raise an exception. - - Args: - func (callable): The function to retry. - delay (int): The period to delay before retrying; specified in seconds. - count (int): The number of previous retries that have occurred. - If this is >= 5, an exception will be raised. - **kwargs (dict): Other keyword arguments to pass to the function. - """ - # If there have been too many retries, simply raise the exception. - if count >= 5: - raise err - - # Sleep the given delay. - time.sleep(delay) - - # Try calling the method again. - return func(delay=delay, count=count, **kwargs) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber.py b/pubsub/google/cloud/pubsub_v1/subscriber.py deleted file mode 100644 index 001e6abac9bb..000000000000 --- a/pubsub/google/cloud/pubsub_v1/subscriber.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2017, Google Inc. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import - -import functools -import pkg_resources - -from google.cloud.gapic.pubsub.v1 import subscriber_client - - -__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version - - -class SubscriberClient(subscriber_client.SubscriberClient): - @functools.wraps(subscriber_client.SubscriberClient.__init__) - def __init__(self, *args, **kwargs): - kwargs['lib_name'] = 'gccl' - kwargs['lib_version'] = __VERSION__ - super(SubscriberClient, self).__init__(*args, **kwargs) - - def get_subscription(self, subscription, options=None): - """Return the """ From 4774f2a7ecf8a70e891f3ed446d04564785aa89b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 20:38:55 -0700 Subject: [PATCH 21/86] wip --- pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py | 4 ++++ pubsub/google/cloud/pubsub_v1/publisher/client.py | 3 +-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py index ac4a1cf926e7..ef5dfcfb11a4 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py @@ -45,6 +45,10 @@ class Batch(base.BaseBatch): change the actual batching settings, see the ``batching`` argument on :class:`~.pubsub_v1.PublisherClient`. + Any properties or methods on this class which are not defined in + :class:`~.pubsub_v1.publisher.batch.BaseBatch` should be considered + implementation details. + Args: client (~.pubsub_v1.PublisherClient): The publisher client used to create this batch. 
diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 84848007ffcc..0e815395d74c 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -15,7 +15,6 @@ from __future__ import absolute_import import functools -import multiprocessing import pkg_resources import six @@ -33,7 +32,7 @@ @_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) class PublisherClient(object): - """A publisher client for Cloud Pub/Sub. + """A publisher client for Google Cloud Pub/Sub. This creates an object that is capable of publishing messages. Generally, you can instantiate this client with no arguments, and you From 0b55ff5ec10167fcbb9eb62e37a9aa03620b73ed Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 21:11:39 -0700 Subject: [PATCH 22/86] wip --- .../cloud/pubsub_v1/subscriber/__init__.py | 22 +++++++ .../cloud/pubsub_v1/subscriber/client.py | 65 +++++++++++++++++++ 2 files changed, 87 insertions(+) create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/__init__.py create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/client.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py new file mode 100644 index 000000000000..8a122da149d1 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+from google.cloud.pubsub_v1.subscriber.client import SubscriberClient
+
+
+__all__ = (
+    'SubscriberClient',
+)
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py
new file mode 100644
index 000000000000..9bda5d206ef4
--- /dev/null
+++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py
@@ -0,0 +1,65 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+import functools
+import pkg_resources
+
+import six
+
+from google.cloud.gapic.pubsub.v1 import subscriber_client
+
+from google.cloud.pubsub_v1 import _gapic
+from google.cloud.pubsub_v1 import types
+
+
+__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version
+
+
+@_gapic.add_methods(subscriber_client.SubscriberClient,
+                    blacklist=('pull', 'streaming_pull'))
+class SubscriberClient(object):
+    """A subscriber client for Google Cloud Pub/Sub.
+
+    This creates an object that is capable of subscribing to messages.
+    Generally, you can instantiate this client with no arguments, and you
+    get sensible defaults.
+
+    Args:
+        flow_control (~.pubsub_v1.types.FlowControl): The flow control
+            settings to be used on individual subscriptions.
+ subscription_class (class): A class that describes how to handle + subscriptions. You may subclass the + :class:`.pubsub_v1.subscriber.subscription.base.BaseSubscription` + class in order to define your own subscriber. This is primarily + provided to allow use of different concurrency models; the default + is based on :class:`multiprocessing.Process`. + **kwargs (dict): Any additional arguments provided are sent as keyword + keyword arguments to the underlying + :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. + Generally, you should not need to set additional keyword + arguments. + """ + def __init__(self, flow_control=(), subscription_class=mp.Subscription, + **kwargs): + # Add the metrics headers, and instantiate the underlying GAPIC + # client. + kwargs['lib_name'] = 'gccl' + kwargs['lib_version'] = __VERSION__ + self.api = subscriber_client.SubscriberClient(**kwargs) + + # The subcription class is responsible to retrieving and dispatching + # messages. + self._subscription_class = subscription_class From 210ef3bb6fdb00a14abad22092597489e72c830e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 23:25:09 -0700 Subject: [PATCH 23/86] Wrote some docs; not much else. --- .../cloud/pubsub_v1/subscriber/client.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index 9bda5d206ef4..a570795ea4f2 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -63,3 +63,31 @@ def __init__(self, flow_control=(), subscription_class=mp.Subscription, # The subcription class is responsible to retrieving and dispatching # messages. self._subscription_class = subscription_class + + def subscribe(self, topic, name, callback=None, flow_control=()): + """Return a representation of an individual subscription. 
+ + This method creates and returns a ``Subscription`` object (that is, a + :class:`~.pubsub_v1.subscriber.subscription.base.BaseSubscription`) + subclass) bound to the topic. It does `not` create the subcription + on the backend (or do any API call at all); it simply returns an + object capable of doing these things. + + If the ``callback`` argument is provided, then the :meth:`open` method + is automatically called on the returned object. If ``callback`` is + not provided, the subscription is returned unopened. + + .. note:: + It only makes sense to provide ``callback`` here if you have + already created the subscription manually in the API. + + Args: + topic (str): The topic being subscribed to. + name (str): The name of the subscription. + callback (function): The callback function. This function receives + the :class:`~.pubsub_v1.types.PubsubMessage` as its only + argument. + flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. Use this to prevent situations where you are + inundated with too many messages at once. 
+ """ From 7cd6156335cdaa81139a94804811a33b13a7afc4 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Sat, 3 Jun 2017 17:10:10 -0700 Subject: [PATCH 24/86] subscriber wip --- .../cloud/pubsub_v1/subscriber/client.py | 15 +- .../pubsub_v1/subscriber/consumer/__init__.py | 0 .../pubsub_v1/subscriber/consumer/base.py | 77 ++++++++++ .../cloud/pubsub_v1/subscriber/consumer/mp.py | 0 .../cloud/pubsub_v1/subscriber/histogram.py | 144 ++++++++++++++++++ .../cloud/pubsub_v1/subscriber/message.py | 116 ++++++++++++++ 6 files changed, 345 insertions(+), 7 deletions(-) create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/consumer/__init__.py create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/histogram.py create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/message.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index a570795ea4f2..14c4da4d25af 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -23,6 +23,7 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber.consumer import mp __VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -40,10 +41,10 @@ class SubscriberClient(object): Args: flow_control (~.pubsub_v1.types.FlowControl): The flow control settings to be used on individual subscriptions. - subscription_class (class): A class that describes how to handle + consumer_class (class): A class that describes how to handle subscriptions. You may subclass the - :class:`.pubsub_v1.subscriber.subscription.base.BaseSubscription` - class in order to define your own subscriber. 
This is primarily + :class:`.pubsub_v1.subscriber.consumer.base.BaseConsumer` + class in order to define your own consumer. This is primarily provided to allow use of different concurrency models; the default is based on :class:`multiprocessing.Process`. **kwargs (dict): Any additional arguments provided are sent as keyword @@ -52,7 +53,7 @@ class in order to define your own subscriber. This is primarily Generally, you should not need to set additional keyword arguments. """ - def __init__(self, flow_control=(), subscription_class=mp.Subscription, + def __init__(self, flow_control=(), consumer_class=mp.Consumer, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. @@ -62,13 +63,13 @@ def __init__(self, flow_control=(), subscription_class=mp.Subscription, # The subcription class is responsible to retrieving and dispatching # messages. - self._subscription_class = subscription_class + self._consumer_class = consumer_class def subscribe(self, topic, name, callback=None, flow_control=()): """Return a representation of an individual subscription. - This method creates and returns a ``Subscription`` object (that is, a - :class:`~.pubsub_v1.subscriber.subscription.base.BaseSubscription`) + This method creates and returns a ``Consumer`` object (that is, a + :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer`) subclass) bound to the topic. It does `not` create the subcription on the backend (or do any API call at all); it simply returns an object capable of doing these things. 
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py new file mode 100644 index 000000000000..90be68d07559 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py @@ -0,0 +1,77 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import abc + +import six + +from google.cloud.pubsub_v1.subscriber import histogram + + +@six.add_metaclass(abc.ABCMeta) +class BaseConsumer(object): + """Abstract base class for consumers. + + Although the :class:`~.pubsub_v1.subscriber.consumer.mp.Consumer` class, + based on :class:`multiprocessing.Process`, is fine for most cases, + advanced users may need to implement something based on a different + concurrency model. + + This class defines the interface for the consumer implementation; + subclasses may be passed as the ``consumer_class`` argument to + :class:`~.pubsub_v1.client.SubscriberClient`. 
+ """ + def __init__(self, client, subscription): + self._client = client + self._subscription = subscription + self._ack_deadline = 10 + self._last_histogram_size = 0 + self.histogram = histogram.Histogram() + + @property + def ack_deadline(self): + """Return the appropriate ack deadline. + + This method is "sticky". It will only perform the computations to + check on the right ack deadline if the histogram has gained a + significant amount of new information. + + Returns: + int: The correct ack deadline. + """ + if len(self.histogram) > self._last_histogram_size * 2: + self._ack_deadline = self.histogram.percentile(percent=99) + return self._ack_deadline + + @abc.abstractmethod + def ack(self, ack_id): + """Acknowledge the message corresponding to the given ack_id.""" + raise NotImplementedError + + @abc.abstractmethod + def modify_ack_deadline(self, ack_id, seconds): + """Modify the ack deadline for the given ack_id.""" + raise NotImplementedError + + @abc.abstractmethod + def open(self, callback): + """Open a streaming pull connection and begin receiving messages. + + For each message received, the ``callback`` function is fired with + a :class:`~.pubsub_v1.subscriber.message.Message` as its only + argument. + """ + raise NotImplementedError diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py new file mode 100644 index 000000000000..7e0a96a9a877 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py @@ -0,0 +1,144 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division + + +class Histogram(object): + """Representation of a single histogram. + + The purpose of this class is to store actual ack timing information + in order to predict how long to renew leases. + + The default implementation uses the 99th percentile of previous ack + times to implicitly lease messages; however, custom + :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer` subclasses + are free to use a different formula. + + The precision of data stored is to the nearest integer. Additionally, + values outside the range of ``10 <= x <= 600`` are stored as ``10`` or + ``600``, since these are the boundaries of leases in the actual API. + """ + def __init__(self): + # The data is stored as a dictionary, with the keys being the + # value being added and the values being the number of times that + # value was added to the dictionary. + # + # This is depending on the Python interpreter's implicit ordering + # of dictionaries, which is a bitwise sort by the key's ``hash()`` + # value. Because ``hash(int i) -> i`` and all of our keys are + # positive integers (negatives would be a problem because the sort + # is bitwise), we can rely on this. + self._data = {} + self._len = 0 + + def __len__(self): + """Return the total number of data points in this histogram. + + This is cached on a separate counter (rather than computing it using + ``sum([v for v in self._data.values()])``) to optimize lookup. + + Returns: + int: The total number of data points in this histogram. 
+ """ + return self._len + + def __contains__(self, needle): + """Return True if needle is present in the histogram, False otherwise. + + Returns: + bool: True or False + """ + return needle in self._data + + def __repr__(self): + return ''.format( + len=len(self), + max=self.max, + min=self.min, + ) + + @property + def max(self): + """Return the maximum value in this histogram. + + If there are no values in the histogram at all, return 600. + + Returns: + int: The maximum value in the histogram. + """ + if len(self._data) == 0: + return 600 + return next(iter(reversed(list(self._data.keys())))) + + @property + def min(self): + """Return the minimum value in this histogram. + + If there are no values in the histogram at all, return 10. + + Returns: + int: The minimum value in the histogram. + """ + if len(self._data) == 0: + return 10 + return next(iter(self._data.keys())) + + def add(self, value): + """Add the value to this histogram. + + Args: + value (int): The value. Values outside of ``10 <= x <= 600`` + will be raised to ``10`` or reduced to ``600``. + """ + # If the value is out of bounds, bring it in bounds. + value = int(value) + if value < 10: + value = 10 + if value > 600: + value = 600 + + # Add the value to the histogram's data dictionary. + self._data.setdefault(value, 0) + self._data[value] += 1 + self._len += 1 + + def percentile(self, percent): + """Return the value that is the Nth precentile in the histogram. + + Args: + percent (int|float): The precentile being sought. The default + consumer implementations use consistently use ``99``. + + Returns: + int: The value corresponding to the requested percentile. + """ + # Sanity check: Any value over 100 should become 100. + if percent >= 100: + percent = 100 + + # Determine the actual target number. + target = len(self) - len(self) * (percent / 100) + + # Iterate over the values in reverse, dropping the target by the + # number of times each value has been seen. 
When the target reaches + # 0, return the value we are currently viewing. + for k in reversed(list(self._data.keys())): + target -= self._data[k] + if target <= 0: + return self._data[k] + + # The only way to get here is if there was no data. + # In this case, just return 10 seconds. + return 10 diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py new file mode 100644 index 000000000000..73b650973214 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -0,0 +1,116 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import math +import time + + +class Message(object): + """A representation of a single Pub/Sub message. + + The common way to interact with + :class:`~.pubsub_v1.subscriber.message.Message` objects is to receive + them in callbacks on subscriptions; most users should never have a need + to instantiate them by hand. (The exception to this is if you are + implementing a custom subclass to + :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.) + + .. note:: + Messages in Google Cloud Pub/Sub are opaque blobs of bytes. This + means that the ``data`` attribute will consistently be a + :class:`bytes` object. If you want a text string, you should + use :meth:`bytes.decode`. + + Properties: + message_id (str): The message ID. In general, you should not need + to use this directly. 
+        data (bytes): The data in the message.
+        attributes (dict): The attributes sent along with the message.
+        publish_time (datetime): The time that this message was originally
+            published.
+    """
+    def __init__(self, consumer, ack_id, message):
+        """Construct the Message.
+
+        Args:
+            consumer (~.pubsub_v1.subscriber.consumer.BaseConsumer): The
+                consumer which originally received this message.
+            ack_id (str): The ack_id received from Pub/Sub.
+            message (~.pubsub_v1.types.PubsubMessage): The message received
+                from Pub/Sub.
+        """
+        self._consumer = consumer
+        self._ack_id = ack_id
+        self.message_id = message.message_id
+        self.data = message.data
+        self.attributes = message.attributes
+        self.publish_time = message.publish_time
+
+        # The instantiation time is the time that this message
+        # was received. Tracking this provides us a way to be smart about
+        # the default lease deadline.
+        self._received_timestamp = time.time()
+
+    def __repr__(self):
+        # Get an abbreviated version of the data.
+        abbv_data = self.data
+        if len(abbv_data) > 50:
+            abbv_data = abbv_data[0:50] + b'...'
+
+        # Return a useful representation.
+        answer = 'Message {\n'
+        answer += '    data: {0!r}\n'.format(abbv_data)
+        answer += '    attributes: {0!r}\n'.format(self.attributes)
+        answer += '}'
+        return answer
+
+    def ack(self):
+        """Acknowledge the given message.
+
+        .. note::
+            Acknowledging a message in Pub/Sub means that you are done
+            with it, and it will not be delivered to this subscription again.
+            You should avoid acknowledging messages until you have
+            *finished* processing them, so that in the event of a failure,
+            you receive the message again.
+
+            Additionally, acks in Pub/Sub are best effort. You should always
+            ensure that your processing code is idempotent, as you may
+            receive any given message more than once.
+ """ + time_to_ack = math.ceil(time.time() - self._received_timestamp) + self._consumer.histogram.add(time_to_ack) + self._consumer.ack(self._ack_id) + + def modify_ack_deadline(self, seconds): + """Set the deadline for acknowledgement to the given value. + + This is not an extension; it *sets* the deadline to the given number + of seconds from right now. It is even possible to use this method to + make a deadline shorter. + + The default implementation handles this for you; you should not need + to manually deal with setting ack deadlines. The exception case is + if you are implementing your own custom subclass of + :class:`~.pubsub_v1.subcriber.consumer.BaseConsumer`. + """ + self._consumer.modify_ack_deadline(self._ack_id, seconds) + + def nack(self): + """Decline to acknowldge the given message. + + This will cause the message to be re-delivered to the subscription. + """ + self.modify_ack_deadline(seconds=0) From fc4ead324a7a39bed15c7a1d63c26c801e600ea6 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 5 Jun 2017 10:26:44 -0700 Subject: [PATCH 25/86] WIP --- .../cloud/pubsub_v1/subscriber/client.py | 18 ++++- .../pubsub_v1/subscriber/consumer/base.py | 22 ++++- .../cloud/pubsub_v1/subscriber/consumer/mp.py | 80 +++++++++++++++++++ .../cloud/pubsub_v1/subscriber/exceptions.py | 19 +++++ .../cloud/pubsub_v1/subscriber/histogram.py | 15 +++- .../cloud/pubsub_v1/subscriber/message.py | 57 ++++++++++--- 6 files changed, 192 insertions(+), 19 deletions(-) create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index 14c4da4d25af..a17621b1510e 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -40,7 +40,8 @@ class SubscriberClient(object): Args: flow_control (~.pubsub_v1.types.FlowControl): The flow control - settings to be used on individual 
subscriptions. + settings. Use this to prevent situations where you are + inundated with too many messages at once. consumer_class (class): A class that describes how to handle subscriptions. You may subclass the :class:`.pubsub_v1.subscriber.consumer.base.BaseConsumer` @@ -65,7 +66,7 @@ def __init__(self, flow_control=(), consumer_class=mp.Consumer, # messages. self._consumer_class = consumer_class - def subscribe(self, topic, name, callback=None, flow_control=()): + def subscribe(self, subscription, callback=None): """Return a representation of an individual subscription. This method creates and returns a ``Consumer`` object (that is, a @@ -83,12 +84,21 @@ def subscribe(self, topic, name, callback=None, flow_control=()): already created the subscription manually in the API. Args: - topic (str): The topic being subscribed to. - name (str): The name of the subscription. + subscription (str): The name of the subscription. The + subscription should have already been created (for example, + by using :meth:`create_subscription`). callback (function): The callback function. This function receives the :class:`~.pubsub_v1.types.PubsubMessage` as its only argument. flow_control (~.pubsub_v1.types.FlowControl): The flow control settings. Use this to prevent situations where you are inundated with too many messages at once. + + Returns: + ~.pubsub_v1.subscriber.consumer.base.BaseConsumer: An instance + of the defined ``consumer_class`` on the client. 
""" + subscr = self._consumer_class(self, subscription) + if callable(callback): + subscr.open(callback) + return subscr diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py index 90be68d07559..4e828107c128 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py @@ -34,12 +34,30 @@ class BaseConsumer(object): subclasses may be passed as the ``consumer_class`` argument to :class:`~.pubsub_v1.client.SubscriberClient`. """ - def __init__(self, client, subscription): + def __init__(self, client, subscription, histogram_data=None): + """Instantiate the consumer. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical format + for this is ``projects/{project}/subscriptions/{subscription}``. + histogram_data (dict): Optional: A structure to store the histogram + data for predicting appropriate ack times. If set, this should + be a dictionary-like object. + + .. note:: + Additionally, the histogram relies on the assumption + that the dictionary will properly sort keys provided + that all keys are positive integers. If you are sending + your own dictionary class, ensure this assumption holds + or you will get strange behavior. + """ self._client = client self._subscription = subscription self._ack_deadline = 10 self._last_histogram_size = 0 - self.histogram = histogram.Histogram() + self.histogram = histogram.Histogram(data=histogram_data) @property def ack_deadline(self): diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py index e69de29bb2d1..5a3e57e93deb 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py @@ -0,0 +1,80 @@ +# Copyright 2017, Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import abc +import multiprocessing + +import six + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import exceptions +from google.cloud.pubsub_v1.subscriber import histogram + + +class Consumer(object): + """A consumer class based on :class:``multiprocessing.Process``. + + This consumer handles the connection to the Pub/Sub service and all of + the concurrency needs. + """ + def __init__(self, client, subscription): + # Create a manager for keeping track of shared state. + self._manager = multiprocessing.Manager() + self._shared = self._manager.Namespace() + self._shared.outgoing_requests = self._manager.list() + self._shared.histogram_data = self._manager.dict() + + # Call the superclass constructor. + super(Consumer, self).__init__(client, subscription, + self._shared.histogram_data) + + # Keep track of the GRPC connection. 
+ self._connection = None + + @abc.abstractmethod + def ack(self, ack_id): + """Acknowledge the message corresponding to the given ack_id.""" + self._shared.outgoing_requests.append(types.StreamingPullRequest( + ack_ids=[ack_id], + )) + + @abc.abstractmethod + def modify_ack_deadline(self, ack_id, seconds): + """Modify the ack deadline for the given ack_id.""" + self._shared.outgoing_requests.append(types.StreamingPullRequest( + modify_deadline_ack_ids=[ack_id], + modify_deadline_seconds=[seconds], + )) + + @abc.abstractmethod + def open(self, callback): + """Open a streaming pull connection and begin receiving messages. + + For each message received, the ``callback`` function is fired with + a :class:`~.pubsub_v1.subscriber.message.Message` as its only + argument. + """ + # Sanity check: If the connection is already open, fail. + if self._connection is not None: + raise exceptions.AlreadyOpen(self._subscription) + + # The streaming connection expects a series of StreamingPullRequest + # objects. The first one must specify the subscription and the + # ack deadline; prepend this to the list. + self._shared.outgoing_requests.insert(0, types.StreamingPullRequest( + stream_ack_deadline_seconds=self.ack_deadline, + subscription=self._subscription, + )) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py b/pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py new file mode 100644 index 000000000000..43a659974c23 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py @@ -0,0 +1,19 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + + +class AlreadyOpen(RuntimeError): + pass diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py index 7e0a96a9a877..b5df134260e4 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py @@ -30,7 +30,16 @@ class Histogram(object): values outside the range of ``10 <= x <= 600`` are stored as ``10`` or ``600``, since these are the boundaries of leases in the actual API. """ - def __init__(self): + def __init__(self, data=None): + """Instantiate the histogram. + + Args: + data (dict): The data strucure to be used to store the + underlying data. The default is an empty dictionary. + This can be set to a dictionary-like object if required + (for example, if a special object is needed for + concurrency reasons). + """ # The data is stored as a dictionary, with the keys being the # value being added and the values being the number of times that # value was added to the dictionary. @@ -40,7 +49,9 @@ def __init__(self): # value. Because ``hash(int i) -> i`` and all of our keys are # positive integers (negatives would be a problem because the sort # is bitwise), we can rely on this. 
- self._data = {} + if data is None: + data = {} + self._data = data self._len = 0 def __len__(self): diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index 73b650973214..1433a5df6805 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -54,6 +54,7 @@ def __init__(self, consumer, ack_id, message): """ self._consumer = consumer self._ack_id = ack_id + self._message = message self.message_id = message.message_id self.data = message.data self.attributes = message.attributes @@ -76,17 +77,45 @@ def __repr__(self): answer += ' attributes: {0!r}\n'.format(self.attributes) answer += '}' + @property + def attributes(self): + """Return the attributes of the underlying Pub/Sub Message. + + Returns: + dict: The message's attributes. + """ + return self._message.attributes + + @property + def data(self): + """Return the data for the underlying Pub/Sub Message. + + Returns: + bytes: The message data. This is always a bytestring; if you + want a text string, call :meth:`bytes.decode`. + """ + return self._message.data + + @property + def publish_time(self): + """Return the time that the message was originally published. + + Returns: + datetime: The date and time that the message was published. + """ + return self._message.publish_time + def ack(self): """Acknowledge the given message. - .. note:: - Acknowledging a message in Pub/Sub means that you are done - with it, and it will not be delivered to this subscription again. - You should avoid acknowledging messages until you have - *finished* processing them, so that in the event of a failure, - you receive the message again. + Acknowledging a message in Pub/Sub means that you are done + with it, and it will not be delivered to this subscription again. 
+ You should avoid acknowledging messages until you have + *finished* processing them, so that in the event of a failure, + you receive the message again. - Additionally, acks in Pub/Sub are best effort. You should always + .. warning:: + Acks in Pub/Sub are best effort. You should always ensure that your processing code is idempotent, as you may receive any given message more than once. """ @@ -97,14 +126,20 @@ def ack(self): def modify_ack_deadline(self, seconds): """Set the deadline for acknowledgement to the given value. - This is not an extension; it *sets* the deadline to the given number - of seconds from right now. It is even possible to use this method to - make a deadline shorter. - The default implementation handles this for you; you should not need to manually deal with setting ack deadlines. The exception case is if you are implementing your own custom subclass of :class:`~.pubsub_v1.subcriber.consumer.BaseConsumer`. + + .. note:: + This is not an extension; it *sets* the deadline to the given + number of seconds from right now. It is even possible to use this + method to make a deadline shorter. + + Args: + seconds (int): The number of seconds to set the lease deadline + to. This should be between 0 and 600. Due to network latency, + values below 10 are advised against. 
""" self._consumer.modify_ack_deadline(self._ack_id, seconds) From 890de3ad93f4d8bcafb9c5178fec17d26ba66544 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 5 Jun 2017 14:59:36 -0700 Subject: [PATCH 26/86] wip --- .../cloud/pubsub_v1/subscriber/consumer/mp.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py index 5a3e57e93deb..a73119a0c69e 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py @@ -39,19 +39,18 @@ def __init__(self, client, subscription): # Call the superclass constructor. super(Consumer, self).__init__(client, subscription, - self._shared.histogram_data) + histogram_data=self._shared.histogram_data, + ) # Keep track of the GRPC connection. - self._connection = None + self._process = None - @abc.abstractmethod def ack(self, ack_id): """Acknowledge the message corresponding to the given ack_id.""" self._shared.outgoing_requests.append(types.StreamingPullRequest( ack_ids=[ack_id], )) - @abc.abstractmethod def modify_ack_deadline(self, ack_id, seconds): """Modify the ack deadline for the given ack_id.""" self._shared.outgoing_requests.append(types.StreamingPullRequest( @@ -59,7 +58,6 @@ def modify_ack_deadline(self, ack_id, seconds): modify_deadline_seconds=[seconds], )) - @abc.abstractmethod def open(self, callback): """Open a streaming pull connection and begin receiving messages. @@ -78,3 +76,10 @@ def open(self, callback): stream_ack_deadline_seconds=self.ack_deadline, subscription=self._subscription, )) + + # Open the request. 
+ self._process = multiprocessing.Process(self.stream) + self._process.start() + + def stream(self): + """Stream data to and from the Cloud Pub/Sub service.""" From 12ace0ecf754fb632f46b3a2e41ca04b7de17483 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 7 Jun 2017 07:33:56 -0700 Subject: [PATCH 27/86] wip --- .../cloud/pubsub_v1/subscriber/__init__.py | 2 +- .../cloud/pubsub_v1/subscriber/client.py | 2 +- .../pubsub_v1/subscriber/consumer/base.py | 9 +++++ .../cloud/pubsub_v1/subscriber/consumer/mp.py | 40 +++++++++++++++---- 4 files changed, 44 insertions(+), 9 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py index 8a122da149d1..ee2aaca57ef0 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import absolute_literals +from __future__ import absolute_import from google.cloud.pubsub_v1.subscriber.client import SubscriberClient diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index a17621b1510e..c2bf71c46f78 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -30,7 +30,7 @@ @_gapic.add_methods(subscriber_client.SubscriberClient, - blacklist=('pull', 'streaming_pull')): + blacklist=('pull', 'streaming_pull')) class SubscriberClient(object): """A subscriber client for Google Cloud Pub/Sub. 
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py index 4e828107c128..2b48b8a08578 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py @@ -74,6 +74,15 @@ def ack_deadline(self): self._ack_deadline = self.histogram.percentile(percent=99) return self._ack_deadline + @property + def subscription(self): + """Return the subscription. + + Returns: + str: The subscription + """ + return self._subscription + @abc.abstractmethod def ack(self, ack_id): """Acknowledge the message corresponding to the given ack_id.""" diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py index a73119a0c69e..42dd41ec22ef 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py @@ -34,6 +34,7 @@ def __init__(self, client, subscription): # Create a manager for keeping track of shared state. self._manager = multiprocessing.Manager() self._shared = self._manager.Namespace() + self._shared.subscription = subscription self._shared.outgoing_requests = self._manager.list() self._shared.histogram_data = self._manager.dict() @@ -45,12 +46,26 @@ def __init__(self, client, subscription): # Keep track of the GRPC connection. self._process = None + @property + def subscription(self): + """Return the subscription. 
+ + Returns: + str: The subscription + """ + return self._shared.subscription + def ack(self, ack_id): """Acknowledge the message corresponding to the given ack_id.""" self._shared.outgoing_requests.append(types.StreamingPullRequest( ack_ids=[ack_id], )) + def close(self): + """Close the existing connection.""" + self._process.terminate() + self._process = None + def modify_ack_deadline(self, ack_id, seconds): """Modify the ack deadline for the given ack_id.""" self._shared.outgoing_requests.append(types.StreamingPullRequest( @@ -64,11 +79,22 @@ def open(self, callback): For each message received, the ``callback`` function is fired with a :class:`~.pubsub_v1.subscriber.message.Message` as its only argument. + + Args: + callback (function): The callback function. """ # Sanity check: If the connection is already open, fail. - if self._connection is not None: + if self._process is not None: raise exceptions.AlreadyOpen(self._subscription) + # Open the request. + self._process = multiprocessing.Process(self.stream) + self._process.daemon = True + self._process.start() + + def stream(self): + """Stream data to and from the Cloud Pub/Sub service.""" + # The streaming connection expects a series of StreamingPullRequest # objects. The first one must specify the subscription and the # ack deadline; prepend this to the list. @@ -77,9 +103,9 @@ def open(self, callback): subscription=self._subscription, )) - # Open the request. 
- self._process = multiprocessing.Process(self.stream) - self._process.start() - - def stream(self): - """Stream data to and from the Cloud Pub/Sub service.""" + import sys + try: + for r in self.api.streaming_pull(self._shared.outgoing_requests): + print(r, file=sys.stderr) + except GaxError: + return self.stream() From 73050002bfe6946641f4c3af4ddb271227238de7 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 14 Jun 2017 13:34:48 -0700 Subject: [PATCH 28/86] wip --- pubsub/google/cloud/pubsub_v1/publisher/client.py | 1 + .../google/cloud/pubsub_v1/subscriber/consumer/mp.py | 10 ++++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 0e815395d74c..4941326c02a5 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -14,6 +14,7 @@ from __future__ import absolute_import +import copy import functools import pkg_resources diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py index 42dd41ec22ef..a1bdc6d2cc4e 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py @@ -19,12 +19,15 @@ import six +from google.gax.errors import GaxError + from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import exceptions from google.cloud.pubsub_v1.subscriber import histogram +from google.cloud.pubsub_v1.subscriber.consumer import base -class Consumer(object): +class Consumer(base.BaseConsumer): """A consumer class based on :class:``multiprocessing.Process``. 
This consumer handles the connection to the Pub/Sub service and all of @@ -105,7 +108,10 @@ def stream(self): import sys try: - for r in self.api.streaming_pull(self._shared.outgoing_requests): + outgoing = iter(self._shared.outgoing_requests) + import pdb ; pdb.set_trace() + for r in self._client.api.streaming_pull(outgoing): + import pdb ; pdb.set_trace() print(r, file=sys.stderr) except GaxError: return self.stream() From 1a53c3767aec30df1c6483aeec8d1b5d063f4f32 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 15 Jun 2017 10:53:54 -0700 Subject: [PATCH 29/86] Honor size and message count limits. --- .../cloud/pubsub_v1/publisher/batch/base.py | 23 ++++++++++ .../cloud/pubsub_v1/publisher/batch/mp.py | 42 +++++++++++++++++-- 2 files changed, 61 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index d0e9e3885d2a..54c959f12eb2 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -44,6 +44,19 @@ def client(self): raise NotImplementedError @property + @abc.abstractmethod + def settings(self): + """Return the settings for this batch. + + Returns: + ~.pubsub_v1.types.Batching: The settings for batch + publishing. These should be considered immutable once the batch + has been opened. + """ + raise NotImplementedError + + @property + @abc.abstractmethod def status(self): """Return the status of this batch. @@ -54,6 +67,16 @@ def status(self): """ raise NotImplementedError + @abc.abstractmethod + def commit(self): + """Asychronously commit everything in this batch. + + Subclasses must define this as an asychronous method; it may be called + from the primary process by :meth:`check_limits`. + """ + raise NotImplementedError + + @abc.abstractmethod def publish(self, data, **attrs): """Publish a single message. 
diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py index ef5dfcfb11a4..52962ae264bf 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py @@ -78,10 +78,11 @@ def __init__(self, client, topic, settings, autocommit=True): # This is purely internal tracking. self._process = None + self._size = 0 # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. - if autocommit and self._shared.settings.max_latency < float('inf'): + if autocommit and self.settings.max_latency < float('inf'): self._process = multiprocessing.Process(target=self.monitor) self._process.start() @@ -104,6 +105,17 @@ def manager(self): """ return self._manager + @property + def settings(self): + """Return the settings for this batch. + + Returns: + ~.pubsub_v1.types.Batching: The settings for batch + publishing. These should be considered immutable once the batch + has been opened. + """ + return self._shared.settings + @property def status(self): """Return the status of this batch. @@ -115,6 +127,14 @@ def status(self): return self._shared.status def commit(self): + """Asychronously publish all of the messages on the active branch. + + This method may be safely called from the primary process. + """ + process = multiprocessing.Process(self._commit) + process.start() + + def _commit(self): """Actually publish all of the messages on the active batch. This moves the batch out from being the active batch to an in-flight @@ -162,15 +182,15 @@ def monitor(self): # in a separate thread. # # Sleep for however long we should be waiting. - time.sleep(self._shared.settings.max_latency) + time.sleep(self.settings.max_latency) # If, in the intervening period, the batch started to be committed, # then no-op at this point. 
- if self._shared.status != self.Status.ACCEPTING_MESSAGES: + if self.status != self.Status.ACCEPTING_MESSAGES: return # Commit. - return self.commit() + return self._commit() def publish(self, data, **attrs): """Publish a single message. @@ -226,10 +246,24 @@ def publish(self, data, **attrs): types.PubsubMessage(data=data, attributes=attrs), ) + # Add the size of the message to our size tracking. + self._size += len(data) + self._size += sum([len(k) + len(v) for k, v in attrs.items()]) + # Return a Future. That future needs to be aware of the status # of this batch. f = Future(self._shared) self._shared.futures.append(f) + + # Check and see if we have hit message limits. If we have, + # commit. + if len(self._shared.messages) >= self.settings.max_messages: + self._shared.status = 'at message cap' + self.commit() + if self._size >= self.settings.max_bytes: + self._shared.status = 'at size cap' + self.commit() + return f From e5a27ae6fd1c4aa337b4931b03c135961112ae82 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 27 Jun 2017 08:12:43 -0700 Subject: [PATCH 30/86] Fix a couple minor lint issues. --- pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py | 3 +-- pubsub/google/cloud/pubsub_v1/publisher/exceptions.py | 4 ++++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py index ef5dfcfb11a4..de6b9005c79b 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py @@ -16,7 +16,6 @@ import copy import multiprocessing -import queue import time import uuid @@ -332,7 +331,7 @@ def exception(self, timeout=None, _wait=1): # If the timeout has been exceeded, raise TimeoutError. if timeout and timeout < 0: - raise TimeoutError('Timed out waiting for an exception.') + raise exceptions.TimeoutError('Timed out waiting for exception.') # Wait a little while and try again. 
time.sleep(_wait) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index e37993b24035..bedc5d5a2a48 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -14,3 +14,7 @@ class PublishError(RuntimeError): pass + + +class TimeoutError(RuntimeError): + pass From d50a22e3667c777cc2647d257189ea7839ede6a0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 27 Jun 2017 09:56:50 -0700 Subject: [PATCH 31/86] Adapting a subscriber that will work. --- pubsub/.flake8 | 6 + .../cloud/pubsub_v1/subscriber/bidi_stream.py | 259 ++++++++++++++++++ .../cloud/pubsub_v1/subscriber/client.py | 4 - .../pubsub_v1/subscriber/consumer/base.py | 83 +++++- .../cloud/pubsub_v1/subscriber/consumer/mp.py | 28 +- .../pubsub_v1/subscriber/helper_threads.py | 121 ++++++++ .../cloud/pubsub_v1/subscriber/message.py | 7 +- 7 files changed, 470 insertions(+), 38 deletions(-) create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/bidi_stream.py create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py diff --git a/pubsub/.flake8 b/pubsub/.flake8 index 25168dc87605..712bd8afe7f4 100644 --- a/pubsub/.flake8 +++ b/pubsub/.flake8 @@ -4,3 +4,9 @@ exclude = .git, *.pyc, conf.py + +ignore = + # Allow "under-indented" continuation lines. + E124, + # Allow closing parentheses to column-match the opening call. + E128 diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/bidi_stream.py b/pubsub/google/cloud/pubsub_v1/subscriber/bidi_stream.py new file mode 100644 index 000000000000..c0b844ad2cb3 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/bidi_stream.py @@ -0,0 +1,259 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Bidirectional Streaming Consumer. +The goal here is to consume a bidirectional streaming RPC by fanning out the +responses received from the server to be processed and fanning in requests from +the response processors to be sent to the server through the request stream. +This module is a framework to deal with this pattern in a consistent way: + + * A :class:`Consumer` manages scheduling requests to a stream and consuming + responses from a stream. The Consumer takes the responses and schedules + them to be processed in callbacks using any + :class:`~concurrent.futures.Executor`. + * A :class:`Policy` which determines how the consumer calls the RPC and + processes responses, errors, and messages. + +The :class:`Policy` is the only class that's intended to be sub-classed here. +This would be implemented for every bidirectional streaming method. +How does this work? The first part of the implementation, fanning out +responses, its actually quite straightforward and can be done with just a +:class:`concurrent.futures.Executor`: + +.. graphviz:: + digraph responses_only { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "Policy" [label="responses", color="red"] + "Policy" -> "futures.Executor" [label="response", color="red"] + "futures.Executor" -> "callback" [label="response", color="red"] + } + +The challenge comes from the fact that in bidirectional streaming two more +things have to be done: + + 1. The consumer must maintain a long-running request generator. 
+ 2. The consumer must provide some way for the response processor to queue + new requests. + +These are especially important because in the case of Pub/Sub you are +essentially streaming requests indefinitely and receiving responses +indefinitely. + +For the first challenge, we take advantage of the fact that gRPC runs the +request generator in its own thread. That thread can block, so we can use +a queue for that: + +.. graphviz:: + digraph response_flow { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "request generator thread" [label="starts", color="gray"] + "request generator thread" -> "gRPC Python" + [label="requests", color="blue"] + } + +The final piece of the puzzle, allowing things from anywhere to queue new +requests, it a bit more complex. If we were only dealing with threads, then the +response workers could just directly interact with the policy/consumer to +queue new requests: + +.. graphviz:: + digraph thread_only_requests { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "request generator thread" [label="starts", color="gray"] + "request generator thread" -> "gRPC Python" + [label="requests", color="blue"] + "Consumer" -> "Policy" [label="responses", color="red"] + "Policy" -> "futures.Executor" [label="response", color="red"] + "futures.Executor" -> "callback" [label="response", color="red"] + "callback" -> "Consumer" [label="send_request", color="blue"] + } + +But, because this does not dictate any particular concurrent strategy for +dealing with the responses, it's possible that a response could be processed +in a different thread, process, or even on a different machine. Because of +this, we need an intermediary queue between the callbacks and the gRPC request +queue to bridge the "concurrecy gap". 
To pump items from the concurrecy-safe +queue into the gRPC request queue, we need another worker thread. Putting this +all together looks like this: + +.. graphviz:: + digraph responses_only { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "request generator thread" [label="starts", color="gray"] + "Policy" -> "QueueCallbackThread" [label="starts", color="gray"] + "request generator thread" -> "gRPC Python" + [label="requests", color="blue"] + "Consumer" -> "Policy" [label="responses", color="red"] + "Policy" -> "futures.Executor" [label="response", color="red"] + "futures.Executor" -> "callback" [label="response", color="red"] + "callback" -> "callback_request_queue" [label="requests", color="blue"] + "callback_request_queue" -> "QueueCallbackThread" + [label="consumed by", color="blue"] + "QueueCallbackThread" -> "Consumer" + [label="send_response", color="blue"] + } + +This part is actually up to the Policy to enable. The consumer just provides a +thread-safe queue for requests. The :cls:`QueueCallbackThread` can be used by +the Policy implementation to spin up the worker thread to pump the +concurrency-safe queue. See the Pub/Sub subscriber implementation for an +example of this. +""" + +import logging +import queue +import threading + +from google.cloud.pubsub_v1.subscriber import helper_threads + +_LOGGER = logging.getLogger(__name__) + + +class BidiStream(object): + """Bi-directional streaming RPC consumer. + + This class coordinates the consumption of a bi-directional streaming RPC. + There is a bit of background information to know before understanding how + this class operates: + + 1. gRPC has its own background thread for dealing with I/O. + 2. gRPC consumes a streaming call's request generator in another + thread. + 3. If the request generator thread exits, gRPC will close the + connection. 
+ + Because of (2) and (3), the consumer must always at least use threading + for some bookkeeping. No matter what, a thread will be created by gRPC to + generate requests. This thread is called the *request generator thread*. + Having the request generator thread allows the consumer to hold the stream + open indefinitely. Now gRPC will send responses as fast as the consumer can + ask for them. The consumer hands these off to the :cls:`Policy` via + :meth:`Policy.on_response`, which should not block. + + Finally, we do not want to block the main thread, so the consumer actually + invokes the RPC itself in a separate thread. This thread is called the + *response consumer helper thread*. + + So all in all there are three threads: + + 1. gRPC's internal I/O thread. + 2. The request generator thread, created by gRPC. + 3. The response consumer helper thread, created by the Consumer. + + In addition, the Consumer likely uses some sort of concurreny to prevent + blocking on processing responses. The Policy may also use another thread to + deal with pumping messages from an external queue into the request queue + here. + + It may seem strange to use threads for something "high performance" + considering the GIL. However, the threads here are not CPU bound. They are + simple threads that are blocked by I/O and generally just move around some + simple objects between queues. The overhead for these helper threads is + low. The Consumer and end-user can configure any sort of executor they want + for the actual processing of the responses, which may be CPU intensive. + """ + + def __init__(self, consumer): + """ + Args: + consumer (Consumer): The consumer. + """ + self._consumer = consumer + self._request_queue = queue.Queue() + self._exiting = threading.Event() + + self.helper_threads = helper_threads.HelperThreadRegistry() + """:cls:`_helper_threads.HelperThreads`: manages the helper threads. + The policy may use this to schedule its own helper threads. 
+ """ + + def send_request(self, request): + """Queue a request to be sent to gRPC. + Args: + request (Any): The request protobuf. + """ + self._request_queue.put(request) + + def _request_generator_thread(self): + """Generate requests for the stream. + + This blocks for new requests on the request queue and yields them to + gRPC. + """ + # Note: gRPC will run this in a separate thread. This can and must + # block to keep the stream open. + initial_request = self._consumer.on_initial_request() + if initial_request is not None: + _LOGGER.debug( + 'Sending initial request: {}'.format(initial_request), + ) + yield initial_request + + while True: + request = self._request_queue.get() + if request == helper_threads.STOP: + _LOGGER.debug('Request generator signaled to stop.') + break + + _LOGGER.debug('Sending request: {}'.format(request)) + yield request + + def _blocking_consume(self): + """Consume the stream indefinitely.""" + while True: + # It is possible that a timeout can cause the stream to not + # exit cleanly when the user has called stop_consuming(). This + # checks to make sure we're not exiting before opening a new + # stream. + if self._exiting.is_set(): + _LOGGER.debug('Event signalled consumer exit.') + break + + request_generator = self._request_generator_thread() + response_generator = self._consumer.call_rpc(request_generator) + try: + for response in response_generator: + self._policy.on_response(response) + + # If the loop above exits without an exception, then the + # request stream terminated cleanly, which should only happen + # when it was signaled to do so by stop_consuming. In this + # case, break out of the while loop and exit this thread. 
+ _LOGGER.debug('Clean RPC loop exit signalled consumer exit.') + break + + except Exception as e: + self._policy.on_exception(e) + + def _consume_thread(self): + """Thread to consume the stream.""" + self._blocking_consume() + + def start_consuming(self): + """Start consuming the stream.""" + self._exiting.clear() + self.helper_threads.start('consume bidirectional stream', + self._request_queue, + self._consume_thread, + ) + self._consumer.initialize(self) + + def stop_consuming(self): + """Signal the stream to stop and block until it completes.""" + self._exiting.set() + self.helper_threads.stop_all() diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index c2bf71c46f78..b2def0bb6409 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -14,15 +14,11 @@ from __future__ import absolute_import -import functools import pkg_resources -import six - from google.cloud.gapic.pubsub.v1 import subscriber_client from google.cloud.pubsub_v1 import _gapic -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber.consumer import mp diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py index 2b48b8a08578..633b96ed3bf2 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py @@ -18,12 +18,14 @@ import six +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import bidi_stream from google.cloud.pubsub_v1.subscriber import histogram @six.add_metaclass(abc.ABCMeta) class BaseConsumer(object): - """Abstract base class for consumers. + """Abstract class defining a subscription consumer. 
Although the :class:`~.pubsub_v1.subscriber.consumer.mp.Consumer` class,
     based on :class:`multiprocessing.Process`, is fine for most cases,
@@ -40,8 +42,9 @@ def __init__(self, client, subscription, histogram_data=None):
         Args:
             client (~.pubsub_v1.subscriber.client): The subscriber client used
                 to create this instance.
-            subscription (str): The name of the subscription. The canonical format
-                for this is ``projects/{project}/subscriptions/{subscription}``.
+            subscription (str): The name of the subscription. The canonical
+                format for this is
+                ``projects/{project}/subscriptions/{subscription}``.
             histogram_data (dict): Optional: A structure to store the histogram
                 data for predicting appropriate ack times. If set, this should
                 be a dictionary-like object.
@@ -55,6 +58,7 @@ def __init__(self, client, subscription, histogram_data=None):
         """
         self._client = client
         self._subscription = subscription
+        self._bidi_stream = bidi_stream.BidiStream(self)
         self._ack_deadline = 10
         self._last_histogram_size = 0
         self.histogram = histogram.Histogram(data=histogram_data)
@@ -74,6 +78,18 @@ def ack_deadline(self):
         self._ack_deadline = self.histogram.percentile(percent=99)
         return self._ack_deadline
 
+    @property
+    def initial_request(self):
+        """Return the initial request.
+
+        This defines the initial request that must always be sent to Pub/Sub
+        immediately upon opening the subscription.
+        """
+        return types.StreamingPullRequest(
+            stream_ack_deadline_seconds=self.histogram.percentile(99),
+            subscription=self.subscription,
+        )
+
     @property
     def subscription(self):
         """Return the subscription.
@@ -83,14 +99,65 @@ def subscription(self):
         """
         return self._subscription
 
-    @abc.abstractmethod
     def ack(self, ack_id):
         """Acknowledge the message corresponding to the given ack_id."""
-        raise NotImplementedError
+        request = types.StreamingPullRequest(ack_ids=[ack_id])
+        self._bidi_stream.send_request(request)
+
+    def call_rpc(self, request_generator):
+        """Invoke the Pub/Sub streaming pull RPC.
+ + Args: + request_generator (Generator): A generator that yields requests, + and blocks if there are no outstanding requests (until such + time as there are). + """ + return self._client.api.streaming_pull(request_generator) - @abc.abstractmethod def modify_ack_deadline(self, ack_id, seconds): """Modify the ack deadline for the given ack_id.""" + request = types.StreamingPullRequest( + modify_deadline_ack_ids=[ack_id], + modify_deadline_seconds=[seconds], + ) + self._bidi_stream.send_request(request) + + def nack(self, ack_id): + """Explicitly deny receipt of a message.""" + return self.modify_ack_deadline(ack_id, 0) + + @abc.abstractmethod + def on_response(self, response): + """Process a response from gRPC. + + This gives the consumer control over how responses are scheduled to + be processed. This method is expected to not block and instead + schedule the response to be consumed by some sort of concurrency. + + For example, if a the Policy implementation takes a callback in its + constructor, you can schedule the callback using a + :cls:`concurrent.futures.ThreadPoolExecutor`:: + + self._pool.submit(self._callback, response) + + This is called from the response consumer helper thread. + + Args: + response (Any): The protobuf response from the RPC. + """ + raise NotImplementedError + + @abc.abstractmethod + def on_exception(self, exception): + """Called when a gRPC exception occurs. + + If this method does nothing, then the stream is re-started. If this + raises an exception, it will stop the consumer thread. + This is executed on the response consumer helper thread. + + Args: + exception (Exception): The exception raised by the RPC. + """ raise NotImplementedError @abc.abstractmethod @@ -100,5 +167,9 @@ def open(self, callback): For each message received, the ``callback`` function is fired with a :class:`~.pubsub_v1.subscriber.message.Message` as its only argument. + + Args: + callback (Callable[Message]): A callable that receives a + Pub/Sub Message. 
""" raise NotImplementedError diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py index a1bdc6d2cc4e..1be7b519d1f3 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py @@ -14,16 +14,13 @@ from __future__ import absolute_import -import abc +from concurrent import futures import multiprocessing -import six - from google.gax.errors import GaxError from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import exceptions -from google.cloud.pubsub_v1.subscriber import histogram from google.cloud.pubsub_v1.subscriber.consumer import base @@ -38,14 +35,17 @@ def __init__(self, client, subscription): self._manager = multiprocessing.Manager() self._shared = self._manager.Namespace() self._shared.subscription = subscription - self._shared.outgoing_requests = self._manager.list() self._shared.histogram_data = self._manager.dict() + self._shared.request_queue = self._manager.Queue() # Call the superclass constructor. super(Consumer, self).__init__(client, subscription, histogram_data=self._shared.histogram_data, ) + # Also maintain a request queue and an executor. + self._executor = futures.ProcessPoolExecutor() + # Keep track of the GRPC connection. 
self._process = None @@ -58,24 +58,11 @@ def subscription(self): """ return self._shared.subscription - def ack(self, ack_id): - """Acknowledge the message corresponding to the given ack_id.""" - self._shared.outgoing_requests.append(types.StreamingPullRequest( - ack_ids=[ack_id], - )) - def close(self): """Close the existing connection.""" self._process.terminate() self._process = None - def modify_ack_deadline(self, ack_id, seconds): - """Modify the ack deadline for the given ack_id.""" - self._shared.outgoing_requests.append(types.StreamingPullRequest( - modify_deadline_ack_ids=[ack_id], - modify_deadline_seconds=[seconds], - )) - def open(self, callback): """Open a streaming pull connection and begin receiving messages. @@ -106,12 +93,7 @@ def stream(self): subscription=self._subscription, )) - import sys try: outgoing = iter(self._shared.outgoing_requests) - import pdb ; pdb.set_trace() - for r in self._client.api.streaming_pull(outgoing): - import pdb ; pdb.set_trace() - print(r, file=sys.stderr) except GaxError: return self.stream() diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py b/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py new file mode 100644 index 000000000000..07ada2a0def3 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py @@ -0,0 +1,121 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import collections +import logging +import threading +import uuid + +import six + +__all__ = ( + 'HelperThreadRegistry', + 'QueueCallbackThread', + 'STOP', +) + +_LOGGER = logging.getLogger(__name__) + +_HelperThread = collections.namedtuple('HelperThreads', + ['name', 'thread', 'queue'], +) + + +# Helper thread stop indicator. This could be a sentinel object or None, +# but the sentinel object's ID can change if the process is forked, and +# None has the possibility of a user accidentally killing the helper +# thread. +STOP = uuid.uuid4() + + +class HelperThreadRegistry(object): + def __init__(self): + self._helper_threads = {} + + def start(self, name, queue, target, *args, **kwargs): + """Create and start a helper thread. + + Args: + name (str): The name of the helper thread. + queue (Queue): A concurrency-safe queue. + target (Callable): The target of the thread. + args: Additional args passed to the thread constructor. + kwargs: Additional kwargs passed to the thread constructor. + + Returns: + threading.Thread: The created thread. + """ + # Create and start the helper thread. + thread = threading.Thread( + name='Consumer helper: {}'.format(name), + target=target, + *args, **kwargs + ) + thread.daemon = True + thread.start() + + # Keep track of the helper thread, so we are able to stop it. + self._helper_threads[name] = _HelperThread(name, thread, queue) + _LOGGER.debug('Started helper thread {}'.format(name)) + return thread + + def stop(self, name): + """Stops a helper thread. + + Sends the stop message and blocks until the thread joins. + + Args: + name (str): The name of the thread. + """ + # Attempt to retrieve the thread; if it is gone already, no-op. + helper_thread = self._helper_threads.get(name) + if helper_thread.thread is None: + return + + # Join the thread if it is still alive. 
+ if helper_thread.thread.is_alive(): + _LOGGER.debug('Stopping helper thread {}'.format(name)) + helper_thread.queue.put(STOP) + helper_thread.thread.join() + + # Remove the thread from our tracking. + self._helper_threads.pop(name, None) + + def stop_all(self): + """Stop all helper threads.""" + # This could be more efficient by sending the stop signal to all + # threads before joining any of them. + for name in list(six.iterkeys(self._helper_threads)): + self.stop(name) + + +class QueueCallbackThread(object): + """A helper thread that executes a callback for every item in + the queue. + """ + def __init__(self, queue, callback): + self.queue = queue + self._callback = callback + + def __call__(self): + while True: + item = self.queue.get() + if item == STOP: + break + + # This doesn't presently deal with exceptions that bubble up + # through the callback. If there is an error here, the thread will + # exit and no further queue items will be processed. We could + # potentially capture errors, log them, and then continue on. + self._callback(item) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index 1433a5df6805..26b27ea5c4cb 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -56,9 +56,6 @@ def __init__(self, consumer, ack_id, message): self._ack_id = ack_id self._message = message self.message_id = message.message_id - self.data = message.data - self.attributes = message.attributes - self.publish_time = message.publish_time # The instantiation time is the time that this message # was received. Tracking this provides us a way to be smart about @@ -68,11 +65,11 @@ def __init__(self, consumer, ack_id, message): def __repr__(self): # Get an abbreviated version of the data. abbv_data = self._data - if len(answer) > 50: + if len(abbv_data) > 50: abbv_data = abbv_data[0:50] + b'...' # Return a useful representation. 
- answer = 'Message {\n' + answer = 'Message {\n' answer += ' data: {0!r}\n'.format(abbv_data) answer += ' attributes: {0!r}\n'.format(self.attributes) answer += '}' From 14f200af5fd4a3f5052b5cc3a8b8257828fd78fe Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 27 Jun 2017 14:51:40 -0700 Subject: [PATCH 32/86] WIP --- .../{bidi_stream.py => consumer.py} | 17 ++--- .../cloud/pubsub_v1/subscriber/message.py | 21 ++++--- .../{consumer => policy}/__init__.py | 0 .../subscriber/{consumer => policy}/base.py | 20 +++--- .../subscriber/{consumer => policy}/mp.py | 63 +++++++++---------- 5 files changed, 65 insertions(+), 56 deletions(-) rename pubsub/google/cloud/pubsub_v1/subscriber/{bidi_stream.py => consumer.py} (96%) rename pubsub/google/cloud/pubsub_v1/subscriber/{consumer => policy}/__init__.py (100%) rename pubsub/google/cloud/pubsub_v1/subscriber/{consumer => policy}/base.py (91%) rename pubsub/google/cloud/pubsub_v1/subscriber/{consumer => policy}/mp.py (62%) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/bidi_stream.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py similarity index 96% rename from pubsub/google/cloud/pubsub_v1/subscriber/bidi_stream.py rename to pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index c0b844ad2cb3..eb74e7300a5d 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/bidi_stream.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -13,6 +13,7 @@ # limitations under the License. """Bidirectional Streaming Consumer. + The goal here is to consume a bidirectional streaming RPC by fanning out the responses received from the server to be processed and fanning in requests from the response processors to be sent to the server through the request stream. @@ -124,7 +125,7 @@ _LOGGER = logging.getLogger(__name__) -class BidiStream(object): +class Consumer(object): """Bi-directional streaming RPC consumer. This class coordinates the consumption of a bi-directional streaming RPC. 
@@ -168,12 +169,13 @@ class BidiStream(object): for the actual processing of the responses, which may be CPU intensive. """ - def __init__(self, consumer): + def __init__(self, policy): """ Args: - consumer (Consumer): The consumer. + policy (Consumer): The consumer policy, which defines how + requests and responses are handled. """ - self._consumer = consumer + self._policy = policy self._request_queue = queue.Queue() self._exiting = threading.Event() @@ -184,6 +186,7 @@ def __init__(self, consumer): def send_request(self, request): """Queue a request to be sent to gRPC. + Args: request (Any): The request protobuf. """ @@ -197,7 +200,7 @@ def _request_generator_thread(self): """ # Note: gRPC will run this in a separate thread. This can and must # block to keep the stream open. - initial_request = self._consumer.on_initial_request() + initial_request = self._policy.on_initial_request() if initial_request is not None: _LOGGER.debug( 'Sending initial request: {}'.format(initial_request), @@ -225,7 +228,7 @@ def _blocking_consume(self): break request_generator = self._request_generator_thread() - response_generator = self._consumer.call_rpc(request_generator) + response_generator = self._policy.call_rpc(request_generator) try: for response in response_generator: self._policy.on_response(response) @@ -251,7 +254,7 @@ def start_consuming(self): self._request_queue, self._consume_thread, ) - self._consumer.initialize(self) + self._policy.initialize(self) def stop_consuming(self): """Signal the stream to stop and block until it completes.""" diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index 26b27ea5c4cb..92c32da75edb 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -42,17 +42,23 @@ class Message(object): publish_time (datetime): The time that this message was originally published. 
""" - def __init__(self, consumer, ack_id, message): + def __init__(self, policy, ack_id, message): """Construct the Message. + .. note:: + + This class should not be constructed directly; it is the + responsibility of :class:`BasePolicy` subclasses to do so. + Args: - consumer (~.pubsub_v1.subscriber.consumer.BaseConsumer): The - consumer which originally received this message. + policy (~.pubsub_v1.subscriber.policy.BasePolicy): The policy + that created this message, and understands how to handle + actions from that message (e.g. acks). ack_id (str): The ack_id received from Pub/Sub. message (~.pubsub_v1.types.PubsubMessage): The message received from Pub/Sub. """ - self._consumer = consumer + self._policy = policy self._ack_id = ack_id self._message = message self.message_id = message.message_id @@ -112,13 +118,14 @@ def ack(self): you receive the message again. .. warning:: + Acks in Pub/Sub are best effort. You should always ensure that your processing code is idempotent, as you may receive any given message more than once. """ time_to_ack = math.ceil(time.time() - self._received_timestamp) - self._consumer.histogram.add(time_to_ack) - self._consumer.ack(self._ack_id) + self._policy.histogram.add(time_to_ack) + self._policy.ack(self._ack_id) def modify_ack_deadline(self, seconds): """Set the deadline for acknowledgement to the given value. @@ -138,7 +145,7 @@ def modify_ack_deadline(self, seconds): to. This should be between 0 and 600. Due to network latency, values below 10 are advised against. """ - self._consumer.modify_ack_deadline(self._ack_id, seconds) + self._policy.modify_ack_deadline(self._ack_id, seconds) def nack(self): """Decline to acknowldge the given message. 
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py similarity index 100% rename from pubsub/google/cloud/pubsub_v1/subscriber/consumer/__init__.py rename to pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py similarity index 91% rename from pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py rename to pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 633b96ed3bf2..88650e59b845 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -19,25 +19,25 @@ import six from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import bidi_stream +from google.cloud.pubsub_v1.subscriber import consumer from google.cloud.pubsub_v1.subscriber import histogram @six.add_metaclass(abc.ABCMeta) -class BaseConsumer(object): - """Abstract class defining a subscription consumer. +class BasePolicy(object): + """Abstract class defining a subscription policy. - Although the :class:`~.pubsub_v1.subscriber.consumer.mp.Consumer` class, + Although the :class:`~.pubsub_v1.subscriber.policy.mp.Policy` class, based on :class:`multiprocessing.Process`, is fine for most cases, advanced users may need to implement something based on a different concurrency model. - This class defines the interface for the consumer implementation; - subclasses may be passed as the ``consumer_class`` argument to + This class defines the interface for the policy implementation; + subclasses may be passed as the ``policy_class`` argument to :class:`~.pubsub_v1.client.SubscriberClient`. """ def __init__(self, client, subscription, histogram_data=None): - """Instantiate the consumer. + """Instantiate the policy. 
Args: client (~.pubsub_v1.subscriber.client): The subscriber client used @@ -58,7 +58,7 @@ def __init__(self, client, subscription, histogram_data=None): """ self._client = client self._subscription = subscription - self._bidi_stream = bidi_stream.BidiStream(self) + self._consumer = consumer.Consumer(self) self._ack_deadline = 10 self._last_histogram_size = 0 self.histogram = histogram.Histogram(data=histogram_data) @@ -102,7 +102,7 @@ def subscription(self): def ack(self, ack_id): """Acknowledge the message corresponding to the given ack_id.""" request = types.StreamingPullRequest(ack_ids=[ack_id]) - self._bidi_stream.send_request(request) + self._consumer.send_request(request) def call_rpc(self, request_generator): """Invoke the Pub/Sub streaming pull RPC. @@ -120,7 +120,7 @@ def modify_ack_deadline(self, ack_id, seconds): modify_deadline_ack_ids=[ack_id], modify_deadline_seconds=[seconds], ) - self._bidi_stream.send_request(request) + self._consumer.send_request(request) def nack(self, ack_id): """Explicitly deny receipt of a message.""" diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py similarity index 62% rename from pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py rename to pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py index 1be7b519d1f3..356c06fe6d94 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py @@ -17,20 +17,21 @@ from concurrent import futures import multiprocessing -from google.gax.errors import GaxError - -from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import exceptions +from google.cloud.pubsub_v1.subscriber import helper_threads from google.cloud.pubsub_v1.subscriber.consumer import base +from google.cloud.pubsub_v1.subscriber.message import Message -class Consumer(base.BaseConsumer): +class Policy(base.BasePolicy): """A consumer class based on 
:class:``multiprocessing.Process``. This consumer handles the connection to the Pub/Sub service and all of the concurrency needs. """ def __init__(self, client, subscription): + # Default the callback to a no-op; it is provided by `.open`. + self._callback = lambda message: None + # Create a manager for keeping track of shared state. self._manager = multiprocessing.Manager() self._shared = self._manager.Namespace() @@ -39,12 +40,16 @@ def __init__(self, client, subscription): self._shared.request_queue = self._manager.Queue() # Call the superclass constructor. - super(Consumer, self).__init__(client, subscription, + super(Policy, self).__init__(client, subscription, histogram_data=self._shared.histogram_data, ) # Also maintain a request queue and an executor. self._executor = futures.ProcessPoolExecutor() + self._callback_requests = helper_threads.QueueCallbackThread( + self._shared.request_queue, + self._on_callback_request, + ) # Keep track of the GRPC connection. self._process = None @@ -60,8 +65,7 @@ def subscription(self): def close(self): """Close the existing connection.""" - self._process.terminate() - self._process = None + self._consumer.helper_threads.stop('callback requests worker') def open(self, callback): """Open a streaming pull connection and begin receiving messages. @@ -73,27 +77,22 @@ def open(self, callback): Args: callback (function): The callback function. """ - # Sanity check: If the connection is already open, fail. - if self._process is not None: - raise exceptions.AlreadyOpen(self._subscription) - - # Open the request. - self._process = multiprocessing.Process(self.stream) - self._process.daemon = True - self._process.start() - - def stream(self): - """Stream data to and from the Cloud Pub/Sub service.""" - - # The streaming connection expects a series of StreamingPullRequest - # objects. The first one must specify the subscription and the - # ack deadline; prepend this to the list. 
- self._shared.outgoing_requests.insert(0, types.StreamingPullRequest( - stream_ack_deadline_seconds=self.ack_deadline, - subscription=self._subscription, - )) - - try: - outgoing = iter(self._shared.outgoing_requests) - except GaxError: - return self.stream() + self._callback = callback + self._consumer.helper_threads.start('callback requests worker', + self._shared.request_queue, + self._callback_requests, + ) + + def on_callback_request(self, callback_request): + """Map the callback request to the appropriate GRPC request.""" + action, args = callback_request[0], callback_request[1:] + getattr(self, action)(*args) + + def on_response(self, response): + """Process all received Pub/Sub messages. + + For each message, schedule a callback with the executor. + """ + for msg in response.received_messages: + message = Message(self, msg.ack_id, msg.message) + self._executor.submit(self._callback, message) From 6a7e846aad99af762bb5a7c14b65e3fc3a17ec09 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 28 Jun 2017 08:57:56 -0700 Subject: [PATCH 33/86] Implement lease management. --- .../cloud/pubsub_v1/subscriber/consumer.py | 8 +- .../cloud/pubsub_v1/subscriber/message.py | 31 ++++++- .../cloud/pubsub_v1/subscriber/policy/base.py | 89 ++++++++++++++++++- .../cloud/pubsub_v1/subscriber/policy/mp.py | 23 ++++- 4 files changed, 138 insertions(+), 13 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index eb74e7300a5d..8e22396e548b 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -168,7 +168,6 @@ class Consumer(object): low. The Consumer and end-user can configure any sort of executor they want for the actual processing of the responses, which may be CPU intensive. 
"""
-
     def __init__(self, policy):
         """
         Args:
@@ -243,18 +242,13 @@ def _blocking_consume(self):
             except Exception as e:
                 self._policy.on_exception(e)
 
-    def _consume_thread(self):
-        """Thread to consume the stream."""
-        self._blocking_consume()
-
     def start_consuming(self):
         """Start consuming the stream."""
         self._exiting.clear()
         self.helper_threads.start('consume bidirectional stream',
                                   self._request_queue,
-                                  self._consume_thread,
+                                  self._blocking_consume,
         )
-        self._policy.initialize(self)
 
     def stop_consuming(self):
         """Signal the stream to stop and block until it completes."""
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py
index 92c32da75edb..5690fd1c6d0d 100644
--- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py
+++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py
@@ -68,6 +68,10 @@ def __init__(self, policy, ack_id, message):
         # the default lease deadline.
         self._received_timestamp = time.time()
 
+        # The policy should lease this message, telling PubSub that it has
+        # it until it is acked or otherwise dropped.
+        self.lease()
+
     def __repr__(self):
         # Get an abbreviated version of the data.
         abbv_data = self._data
@@ -118,7 +122,6 @@ def ack(self):
         you receive the message again.
 
         .. warning::
-
             Acks in Pub/Sub are best effort. You should always ensure
             that your processing code is idempotent, as you may receive
             any given message more than once.
@@ -126,6 +129,31 @@ def ack(self):
         time_to_ack = math.ceil(time.time() - self._received_timestamp)
         self._policy.histogram.add(time_to_ack)
         self._policy.ack(self._ack_id)
+        self.drop()
+
+    def drop(self):
+        """Release the message from lease management.
+
+        This informs the policy to no longer hold on to the lease for this
+        message. Pub/Sub will re-deliver the message if it is not acknowledged
+        before the existing lease expires.
+
+        ..
warning::
+            For most use cases, the only reason to drop a message from
+            lease management is on :meth:`ack` or :meth:`nack`; these methods
+            both call this one. You probably do not want to call this method
+            directly.
+        """
+        self._policy.drop(self._ack_id)
+
+    def lease(self):
+        """Inform the policy to lease this message continually.
+
+        .. note::
+            This method is called by the constructor, and you should never
+            need to call it manually.
+        """
+        self._policy.lease(self._ack_id)
 
     def modify_ack_deadline(self, seconds):
         """Set the deadline for acknowledgement to the given value.
@@ -153,3 +181,4 @@ def nack(self):
         This will cause the message to be re-delivered to the subscription.
         """
         self.modify_ack_deadline(seconds=0)
+        self.drop()
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py
index 88650e59b845..2dd68c8b6f3f 100644
--- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py
+++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py
@@ -15,6 +15,8 @@
 from __future__ import absolute_import
 
 import abc
+import random
+import time
 
 import six
+ """ request = types.StreamingPullRequest(ack_ids=[ack_id]) self._consumer.send_request(request) @@ -114,8 +131,70 @@ def call_rpc(self, request_generator): """ return self._client.api.streaming_pull(request_generator) + def drop(self, ack_id): + """Remove the given ack ID from lease management. + + Args: + ack_id (str): The ack ID. + """ + self.managed_ack_ids.remove(ack_id) + + def lease(self, ack_id): + """Add the given ack ID to lease management. + + Args: + ack_id (str): The ack ID. + """ + self.managed_ack_ids.add(ack_id) + + def maintain_leases(self): + """Maintain all of the leases being managed by the policy. + + This method modifies the ack deadline for all of the managed + ack IDs, then waits for most of that time (but with jitter), and + then calls itself. + + .. warning:: + This method blocks, and generally should be run in a separate + thread or process. + + Additionally, you should not have to call this method yourself, + unless you are implementing your own policy. If you are + implementing your own policy, you _should_ call this method + in an appropriate form of subprocess. + """ + # Determine the appropriate duration for the lease. + # This is based off of how long previous messages have taken to ack, + # with a sensible default and within the ranges allowed by Pub/Sub. + p99 = self.histogram.percentile(99) + + # Create a streaming pull request. + # We do not actually call `modify_ack_deadline` over and over because + # it is more efficient to make a single request. + ack_ids = list(self.managed_ack_ids) + if len(ack_ids) > 0: + request = types.StreamingPullRequest( + modify_deadline_ack_ids=ack_ids, + modify_deadline_seconds=[p99] * len(ack_ids), + ) + self._consumer.send_request(request) + + # Now wait an appropriate period of time and do this again. + # + # We determine the appropriate period of time based on a random + # period between 0 seconds and 90% of the lease. 
This use of + # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases + # where there are many clients. + time.sleep(random.uniform(0.0, p99 * 0.9)) + self.maintain_managed_leases() + def modify_ack_deadline(self, ack_id, seconds): - """Modify the ack deadline for the given ack_id.""" + """Modify the ack deadline for the given ack_id. + + Args: + ack_id (str): The ack ID + seconds (int): The number of seconds to set the new deadline to. + """ request = types.StreamingPullRequest( modify_deadline_ack_ids=[ack_id], modify_deadline_seconds=[seconds], @@ -123,7 +202,11 @@ def modify_ack_deadline(self, ack_id, seconds): self._consumer.send_request(request) def nack(self, ack_id): - """Explicitly deny receipt of a message.""" + """Explicitly deny receipt of a message. + + Args: + ack_id (str): The ack ID. + """ return self.modify_ack_deadline(ack_id, 0) @abc.abstractmethod diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py index 356c06fe6d94..c2c25bc830fa 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py @@ -36,6 +36,7 @@ def __init__(self, client, subscription): self._manager = multiprocessing.Manager() self._shared = self._manager.Namespace() self._shared.subscription = subscription + self._shared.managed_ack_ids = self._manager.set() self._shared.histogram_data = self._manager.dict() self._shared.request_queue = self._manager.Queue() @@ -51,8 +52,19 @@ def __init__(self, client, subscription): self._on_callback_request, ) - # Keep track of the GRPC connection. - self._process = None + # Spawn a process that maintains all of the leases for this policy. + self._lease_process = multiprocessing.Process(self.maintain_leases) + self._lease_process.daemon = True + self._lease_process.start() + + @property + def managed_ack_ids(self): + """Return the ack IDs currently being managed by the policy. 
+ + Returns: + set: The set of ack IDs being managed. + """ + return self._shared.managed_ack_ids @property def subscription(self): @@ -88,6 +100,13 @@ def on_callback_request(self, callback_request): action, args = callback_request[0], callback_request[1:] getattr(self, action)(*args) + def on_exception(self, exception): + """Bubble the exception. + + This will cause the stream to exit loudly. + """ + raise exception + def on_response(self, response): """Process all received Pub/Sub messages. From 3bb130bd5b7493c764afbda5d5b1e4957ffb1b9e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 29 Jun 2017 11:04:09 -0700 Subject: [PATCH 34/86] WIP --- .../cloud/pubsub_v1/subscriber/client.py | 14 +++---- .../cloud/pubsub_v1/subscriber/consumer.py | 13 +++--- .../cloud/pubsub_v1/subscriber/policy/base.py | 2 +- .../cloud/pubsub_v1/subscriber/policy/mp.py | 42 +++++++++++++------ 4 files changed, 42 insertions(+), 29 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index b2def0bb6409..410d6ea130fc 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -19,7 +19,7 @@ from google.cloud.gapic.pubsub.v1 import subscriber_client from google.cloud.pubsub_v1 import _gapic -from google.cloud.pubsub_v1.subscriber.consumer import mp +from google.cloud.pubsub_v1.subscriber.policy import mp __VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -38,19 +38,19 @@ class SubscriberClient(object): flow_control (~.pubsub_v1.types.FlowControl): The flow control settings. Use this to prevent situations where you are inundated with too many messages at once. - consumer_class (class): A class that describes how to handle + policy_class (class): A class that describes how to handle subscriptions. 
You may subclass the - :class:`.pubsub_v1.subscriber.consumer.base.BaseConsumer` + :class:`.pubsub_v1.subscriber.policy.base.BasePolicy` class in order to define your own consumer. This is primarily provided to allow use of different concurrency models; the default is based on :class:`multiprocessing.Process`. - **kwargs (dict): Any additional arguments provided are sent as keyword + kwargs (dict): Any additional arguments provided are sent as keyword keyword arguments to the underlying :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. Generally, you should not need to set additional keyword arguments. """ - def __init__(self, flow_control=(), consumer_class=mp.Consumer, + def __init__(self, flow_control=(), policy_class=mp.Policy, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. @@ -60,7 +60,7 @@ def __init__(self, flow_control=(), consumer_class=mp.Consumer, # The subcription class is responsible to retrieving and dispatching # messages. - self._consumer_class = consumer_class + self._policy_class = policy_class def subscribe(self, subscription, callback=None): """Return a representation of an individual subscription. @@ -94,7 +94,7 @@ def subscribe(self, subscription, callback=None): ~.pubsub_v1.subscriber.consumer.base.BaseConsumer: An instance of the defined ``consumer_class`` on the client. """ - subscr = self._consumer_class(self, subscription) + subscr = self._policy_class(self, subscription) if callable(callback): subscr.open(callback) return subscr diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index 8e22396e548b..2478196bd04f 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -197,15 +197,12 @@ def _request_generator_thread(self): This blocks for new requests on the request queue and yields them to gRPC. """ - # Note: gRPC will run this in a separate thread. 
This can and must - # block to keep the stream open. - initial_request = self._policy.on_initial_request() - if initial_request is not None: - _LOGGER.debug( - 'Sending initial request: {}'.format(initial_request), - ) - yield initial_request + # First, yield the initial request. This occurs on every new + # connection, fundamentally including a resumed connection. + yield self._policy.initial_request + # Now yield each of the items on the request queue, and block if there + # are none. This can and must block to keep the stream open. while True: request = self._request_queue.get() if request == helper_threads.STOP: diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 2dd68c8b6f3f..3ddc072f2925 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -186,7 +186,7 @@ def maintain_leases(self): # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases # where there are many clients. time.sleep(random.uniform(0.0, p99 * 0.9)) - self.maintain_managed_leases() + self.maintain_leases() def modify_ack_deadline(self, ack_id, seconds): """Modify the ack deadline for the given ack_id. 
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py index c2c25bc830fa..896914c46305 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py @@ -15,13 +15,25 @@ from __future__ import absolute_import from concurrent import futures +from multiprocessing import managers +import logging import multiprocessing from google.cloud.pubsub_v1.subscriber import helper_threads -from google.cloud.pubsub_v1.subscriber.consumer import base +from google.cloud.pubsub_v1.subscriber.policy import base from google.cloud.pubsub_v1.subscriber.message import Message +logger = logging.getLogger(__name__) + + +# Allow sets to be able to be run through the managers; ensure they are +# iterable and have add/remove. +managers.SyncManager.register('set', set, + exposed=('__contains__', '__iter__', 'add', 'remove'), +) + + class Policy(base.BasePolicy): """A consumer class based on :class:``multiprocessing.Process``. @@ -34,26 +46,28 @@ def __init__(self, client, subscription): # Create a manager for keeping track of shared state. self._manager = multiprocessing.Manager() - self._shared = self._manager.Namespace() - self._shared.subscription = subscription - self._shared.managed_ack_ids = self._manager.set() - self._shared.histogram_data = self._manager.dict() - self._shared.request_queue = self._manager.Queue() + self._shared = self._manager.Namespace(subscription=subscription) + self._managed_ack_ids = self._manager.set() + self._request_queue = self._manager.Queue() # Call the superclass constructor. super(Policy, self).__init__(client, subscription, - histogram_data=self._shared.histogram_data, + histogram_data=self._manager.dict(), ) # Also maintain a request queue and an executor. 
+ logger.debug('Creating callback requests thread (not starting).') self._executor = futures.ProcessPoolExecutor() self._callback_requests = helper_threads.QueueCallbackThread( - self._shared.request_queue, - self._on_callback_request, + self._request_queue, + self.on_callback_request, ) # Spawn a process that maintains all of the leases for this policy. - self._lease_process = multiprocessing.Process(self.maintain_leases) + logger.debug('Spawning lease process.') + self._lease_process = multiprocessing.Process( + target=self.maintain_leases, + ) self._lease_process.daemon = True self._lease_process.start() @@ -64,7 +78,7 @@ def managed_ack_ids(self): Returns: set: The set of ack IDs being managed. """ - return self._shared.managed_ack_ids + return self._managed_ack_ids @property def subscription(self): @@ -87,13 +101,15 @@ def open(self, callback): argument. Args: - callback (function): The callback function. + callback (Callable): The callback function. """ + logger.debug('Starting callback requests worker.') self._callback = callback self._consumer.helper_threads.start('callback requests worker', - self._shared.request_queue, + self._request_queue, self._callback_requests, ) + self._consumer.start_consuming() def on_callback_request(self, callback_request): """Map the callback request to the appropriate GRPC request.""" From f97dc237d5ae79ec184b87024c197891f19527ea Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 29 Jun 2017 12:21:40 -0700 Subject: [PATCH 35/86] WIP --- pubsub/google/cloud/pubsub_v1/subscriber/consumer.py | 1 + pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index 2478196bd04f..5cc583c7e5ec 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -227,6 +227,7 @@ def 
_blocking_consume(self): response_generator = self._policy.call_rpc(request_generator) try: for response in response_generator: + print(response) self._policy.on_response(response) # If the loop above exits without an exception, then the diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 3ddc072f2925..caa8c3580313 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -20,6 +20,8 @@ import six +from google import gax + from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import consumer from google.cloud.pubsub_v1.subscriber import histogram @@ -129,7 +131,9 @@ def call_rpc(self, request_generator): and blocks if there are no outstanding requests (until such time as there are). """ - return self._client.api.streaming_pull(request_generator) + return self._client.api.streaming_pull(request_generator, + options=gax.CallOptions(timeout=600), + ) def drop(self, ack_id): """Remove the given ack ID from lease management. 
From 303436c430864364f571a05acc698a6b1f57d2b3 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 5 Jul 2017 08:18:18 -0700 Subject: [PATCH 36/86] WIP --- pubsub/google/cloud/pubsub_v1/subscriber/consumer.py | 4 ++-- pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py | 9 +++++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index 5cc583c7e5ec..2026b30f3f2c 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -227,7 +227,6 @@ def _blocking_consume(self): response_generator = self._policy.call_rpc(request_generator) try: for response in response_generator: - print(response) self._policy.on_response(response) # If the loop above exits without an exception, then the @@ -236,7 +235,8 @@ def _blocking_consume(self): # case, break out of the while loop and exit this thread. _LOGGER.debug('Clean RPC loop exit signalled consumer exit.') break - + except KeyboardInterrupt: + self.stop_consuming() except Exception as e: self._policy.on_exception(e) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py index 896914c46305..76129707cdd1 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py @@ -19,6 +19,8 @@ import logging import multiprocessing +import grpc + from google.cloud.pubsub_v1.subscriber import helper_threads from google.cloud.pubsub_v1.subscriber.policy import base from google.cloud.pubsub_v1.subscriber.message import Message @@ -121,6 +123,13 @@ def on_exception(self, exception): This will cause the stream to exit loudly. """ + # If this is DEADLINE_EXCEEDED, then we want to retry. + # That entails just returning None. 
+ deadline_exceeded = grpc.StatusCode.DEADLINE_EXCEEDED + if getattr(exception, 'code', lambda: None)() == deadline_exceeded: + return + + # Raise any other exception. raise exception def on_response(self, response): From 933d2f3589195887dbf735bbd14d2f1a5eaf5e91 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 13 Jul 2017 11:17:42 -0700 Subject: [PATCH 37/86] WIP --- .../cloud/pubsub_v1/publisher/batch/base.py | 122 +++++++++--- .../publisher/batch/{mp.py => thread.py} | 182 ++++++++++-------- .../cloud/pubsub_v1/publisher/client.py | 31 ++- .../cloud/pubsub_v1/subscriber/client.py | 6 +- .../cloud/pubsub_v1/subscriber/consumer.py | 5 +- .../cloud/pubsub_v1/subscriber/histogram.py | 2 +- .../cloud/pubsub_v1/subscriber/message.py | 4 +- .../cloud/pubsub_v1/subscriber/policy/base.py | 15 +- .../subscriber/policy/{mp.py => thread.py} | 47 +---- 9 files changed, 235 insertions(+), 179 deletions(-) rename pubsub/google/cloud/pubsub_v1/publisher/batch/{mp.py => thread.py} (69%) rename pubsub/google/cloud/pubsub_v1/subscriber/policy/{mp.py => thread.py} (75%) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index d0e9e3885d2a..263c18e56a80 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -24,8 +24,8 @@ class BaseBatch(object): """The base batching class for Pub/Sub publishing. - Although the :class:`~.pubsub_v1.publisher.batch.mp.Batch` class, based - on :class:`multiprocessing.Process`, is fine for most cases, advanced + Although the :class:`~.pubsub_v1.publisher.batch.thread.Batch` class, based + on :class:`threading.Thread`, is fine for most cases, advanced users may need to implement something based on a different concurrency model. @@ -33,6 +33,10 @@ class BaseBatch(object): subclasses may be passed as the ``batch_class`` argument to :class:`~.pubsub_v1.client.PublisherClient`. 
""" + def __len__(self): + """Return the number of messages currently in the batch.""" + return len(self.messages) + @property @abc.abstractmethod def client(self): @@ -44,47 +48,95 @@ def client(self): raise NotImplementedError @property + @abc.abstractmethod + def client(self): + """Return the client used to create this batch. + + Returns: + ~.pubsub_v1.client.PublisherClient: A publisher client. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def messages(self): + """Return the messages currently in the batch. + + Returns: + Sequence: The messages currently in the batch. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def size(self): + """Return the total size of all of the messages currently in the batch. + + Returns: + int: The total size of all of the messages currently + in the batch, in bytes. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def settings(self): + """Return the batch settings. + + Returns: + ~.pubsub_v1.types.BatchSettings: The batch settings. These are + considered immutable once the batch has been opened. + """ + raise NotImplementedError + + @property + @abc.abstractmethod def status(self): """Return the status of this batch. Returns: str: The status of this batch. All statuses are human-readable, - all-lowercase strings, and represented in the - :class:`BaseBatch.Status` enum. + all-lowercase strings. The ones represented in the + :class:`BaseBatch.Status` enum are special, but other statuses + are permitted. """ raise NotImplementedError - def publish(self, data, **attrs): - """Publish a single message. + def will_accept(self, message): + """Return True if the batch is able to accept the message. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + bool: Whether this batch can accept the message. + """ + # If this batch is not accepting messages generally, return False. 
+ if self.status != self.Status.ACCEPTING_MESSAGES: + return False - .. note:: - Messages in Pub/Sub are blobs of bytes. They are *binary* data, - not text. You must send data as a bytestring - (``bytes`` in Python 3; ``str`` in Python 2), and this library - will raise an exception if you send a text string. + # If this batch can not hold the message in question, return False. + if self.size + message.ByteSize() > self.settings.max_bytes: + return False - The reason that this is so important (and why we do not try to - coerce for you) is because Pub/Sub is also platform independent - and there is no way to know how to decode messages properly on - the other side; therefore, encoding and decoding is a required - exercise for the developer. + # Okay, everything is good. + return True + + @abc.abstractmethod + def publish(self, message): + """Publish a single message. Add the given message to this object; this will cause it to be published once the batch either has enough messages or a sufficient period of time has elapsed. - Args: - data (bytes): A bytestring representing the message body. This - must be a bytestring (a text string will raise TypeError). - attrs (Mapping[str, str]): A dictionary of attributes to be - sent as metadata. (These may be text strings or byte strings.) + This method is called by :meth:`~.PublisherClient.publish`. - Raises: - TypeError: If the ``data`` sent is not a bytestring, or if the - ``attrs`` are not either a ``str`` or ``bytes``. + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. Returns: - ~.pubsub_v1.publisher.future.Future: An object conforming to the + ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the :class:`concurrent.futures.Future` interface. """ raise NotImplementedError @@ -101,7 +153,19 @@ class Status(object): SUCCESS = 'success' -# Make a fake batch. This is used by the client to do single-op checks -# for batch existence. 
-FakeBatch = collections.namedtuple('FakeBatch', ['status']) -FAKE = FakeBatch(status='fake') +class RejectionBatch(object): + """A fake batch-like object that refuses to accept any message. + + This is used by the client to do single-op checks for batch + existence. + """ + def will_accept(self, message): + """Return False. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + bool: Whether this batch can accept the message. It never can. + """ + return False diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py similarity index 69% rename from pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py rename to pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index de6b9005c79b..95418dc9cada 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -14,13 +14,10 @@ from __future__ import absolute_import -import copy -import multiprocessing +import threading import time import uuid -import six - from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.batch import base @@ -53,7 +50,7 @@ class Batch(base.BaseBatch): create this batch. topic (str): The topic. The format for this is ``projects/{project}/topics/{topic}``. - settings (~.pubsub_v1.types.Batching): The settings for batch + settings (~.pubsub_v1.types.BatchSettings): The settings for batch publishing. These should be considered immutable once the batch has been opened. autocommit (bool): Whether to autocommit the batch when the time @@ -62,27 +59,26 @@ class Batch(base.BaseBatch): """ def __init__(self, client, topic, settings, autocommit=True): self._client = client - self._manager = multiprocessing.Manager() # Create a namespace that is owned by the client manager; this # is necessary to be able to have these values be communicable between # processes. 
- self._shared = self.manager.Namespace() - self._shared.futures = self.manager.list() - self._shared.messages = self.manager.list() - self._shared.message_ids = self.manager.dict() - self._shared.settings = settings - self._shared.status = self.Status.ACCEPTING_MESSAGES - self._shared.topic = topic + self._futures = [] + self._messages = [] + self._size = 0 + self._message_ids = {} + self._settings = settings + self._status = self.Status.ACCEPTING_MESSAGES + self._topic = topic # This is purely internal tracking. - self._process = None + self._thread = None # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. - if autocommit and self._shared.settings.max_latency < float('inf'): - self._process = multiprocessing.Process(target=self.monitor) - self._process.start() + if autocommit and self._settings.max_latency < float('inf'): + self._thread = threading.Thread(target=self.monitor) + self._thread.start() @property def client(self): @@ -94,14 +90,33 @@ def client(self): return self._client @property - def manager(self): - """Return the client's manager. + def messages(self): + """Return the messages currently in the batch. + + Returns: + Sequence: The messages currently in the batch. + """ + return self._messages + + @property + def settings(self): + """Return the batch settings. + + Returns: + ~.pubsub_v1.types.BatchSettings: The batch settings. These are + considered immutable once the batch has been opened. + """ + return self._settings + + @property + def size(self): + """Return the total size of all of the messages currently in the batch. Returns: - :class:`multiprocessing.Manager`: The manager responsible for - handling shared memory objects. + int: The total size of all of the messages currently + in the batch, in bytes. """ - return self._manager + return self._size @property def status(self): @@ -111,24 +126,51 @@ def status(self): str: The status of this batch. 
All statuses are human-readable, all-lowercase strings. """ - return self._shared.status + return self._status def commit(self): """Actually publish all of the messages on the active batch. + This synchronously sets the batch status to in-flight, and then opens + a new thread, which handles actually sending the messages to Pub/Sub. + + .. note:: + + This method is non-blocking. It opens a new thread, which calls + :meth:`_commit`, which does block. + """ + # Set the status to in-flight synchronously, to ensure that + # this batch will necessarily not accept new messages. + # + # Yes, this is repeated in `_commit`, because that method is called + # directly by `monitor`. + self._status = 'in-flight' + + # Start a new thread to actually handle the commit. + commit_thread = threading.Thread(target=self._commit) + commit_thread.start() + + def _commit(self): + """Actually publish all of the messages on the active batch. + This moves the batch out from being the active batch to an in-flight batch on the publisher, and then the batch is discarded upon completion. + + .. note:: + + This method blocks. The :meth:`commit` method is the non-blocking + version, which calls this one. """ # Update the status. - self._shared.status = 'in-flight' + self._status = 'in-flight' # Begin the request to publish these messages. - if len(self._shared.messages) == 0: + if len(self._messages) == 0: raise Exception('Empty queue') response = self._client.api.publish( - self._shared.topic, - self._shared.messages, + self._topic, + self.messages, ) # FIXME (lukesneeringer): Check for failures; retry. @@ -138,7 +180,7 @@ def commit(self): # Sanity check: If the number of message IDs is not equal to the # number of futures I have, then something went wrong. 
- if len(response.message_ids) != len(self._shared.futures): + if len(response.message_ids) != len(self._futures): raise exceptions.PublishError( 'Some messages were not successfully published.', ) @@ -146,9 +188,9 @@ def commit(self): # Iterate over the futures on the queue and return the response IDs. # We are trusting that there is a 1:1 mapping, and raise an exception # if not. - self._shared.status = self.Status.SUCCESS - for message_id, fut in zip(response.message_ids, self._shared.futures): - self._shared.message_ids[hash(fut)] = message_id + self._status = self.Status.SUCCESS + for message_id, fut in zip(response.message_ids, self._futures): + self._message_ids[hash(fut)] = message_id fut._trigger() def monitor(self): @@ -161,74 +203,46 @@ def monitor(self): # in a separate thread. # # Sleep for however long we should be waiting. - time.sleep(self._shared.settings.max_latency) + time.sleep(self._settings.max_latency) # If, in the intervening period, the batch started to be committed, # then no-op at this point. - if self._shared.status != self.Status.ACCEPTING_MESSAGES: + if self._status != self.Status.ACCEPTING_MESSAGES: return # Commit. - return self.commit() + return self._commit() - def publish(self, data, **attrs): + def publish(self, message): """Publish a single message. - .. note:: - Messages in Pub/Sub are blobs of bytes. They are *binary* data, - not text. You must send data as a bytestring - (``bytes`` in Python 3; ``str`` in Python 2), and this library - will raise an exception if you send a text string. - - The reason that this is so important (and why we do not try to - coerce for you) is because Pub/Sub is also platform independent - and there is no way to know how to decode messages properly on - the other side; therefore, encoding and decoding is a required - exercise for the developer. 
- Add the given message to this object; this will cause it to be published once the batch either has enough messages or a sufficient period of time has elapsed. - Args: - data (bytes): A bytestring representing the message body. This - must be a bytestring (a text string will raise TypeError). - attrs (Mapping[str, str]): A dictionary of attributes to be - sent as metadata. (These may be text strings or byte strings.) + This method is called by :meth:`~.PublisherClient.publish`. - Raises: - TypeError: If the ``data`` sent is not a bytestring, or if the - ``attrs`` are not either a ``str`` or ``bytes``. + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. Returns: ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the :class:`concurrent.futures.Future` interface. """ - # Sanity check: Is the data being sent as a bytestring? - # If it is literally anything else, complain loudly about it. - if not isinstance(data, six.binary_type): - raise TypeError('Data being published to Pub/Sub must be sent ' - 'as a bytestring.') - - # Coerce all attributes to text strings. - for k, v in copy.copy(attrs).items(): - if isinstance(data, six.text_type): - continue - if isinstance(data, six.binary_type): - attrs[k] = v.decode('utf-8') - continue - raise TypeError('All attributes being published to Pub/Sub must ' - 'be sent as text strings.') + # Coerce the type, just in case. + message = types.PubsubMessage(message) + + # Add the size to the running total of the size, so we know + # if future messages need to be rejected. + self._size += message.ByteSize() # Store the actual message in the batch's message queue. - self._shared.messages.append( - types.PubsubMessage(data=data, attributes=attrs), - ) + self._messages.append(message) # Return a Future. That future needs to be aware of the status # of this batch. 
- f = Future(self._shared) - self._shared.futures.append(f) + f = Future(self) + self._futures.append(f) return f @@ -242,11 +256,11 @@ class Future(object): methods in this library. Args: - batch (:class:`multiprocessing.Namespace`): Information about the - batch object that is committing this message. + batch (:class:`~.Batch`): The batch object that is committing + this message. """ - def __init__(self, batch_info): - self._batch_info = batch_info + def __init__(self, batch): + self._batch = batch self._callbacks = [] self._hash = hash(uuid.uuid4()) @@ -280,7 +294,7 @@ def done(self): This still returns True in failure cases; checking `result` or `exception` is the canonical way to assess success or failure. """ - return self._batch_info.status in ('success', 'error') + return self._batch.status in ('success', 'error') def result(self, timeout=None): """Return the message ID, or raise an exception. @@ -302,7 +316,7 @@ def result(self, timeout=None): # return an appropriate value. err = self.exception(timeout=timeout) if err is None: - return self._batch_info.message_ids[hash(self)] + return self._batch.message_ids[hash(self)] raise err def exception(self, timeout=None, _wait=1): @@ -322,12 +336,12 @@ def exception(self, timeout=None, _wait=1): :class:`Exception`: The exception raised by the call, if any. """ # If the batch completed successfully, this should return None. - if self._batch_info.status == 'success': + if self._batch.status == 'success': return None # If this batch had an error, this should return it. - if self._batch_info.status == 'error': - return self._batch_info.error + if self._batch.status == 'error': + return self._batch.error # If the timeout has been exceeded, raise TimeoutError. 
if timeout and timeout < 0: diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 4941326c02a5..5ebbceec84e6 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -15,7 +15,6 @@ from __future__ import absolute_import import copy -import functools import pkg_resources import six @@ -25,7 +24,7 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher.batch import base -from google.cloud.pubsub_v1.publisher.batch import mp +from google.cloud.pubsub_v1.publisher.batch import thread __VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -47,13 +46,13 @@ class PublisherClient(object): :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in order to define your own batcher. This is primarily provided to allow use of different concurrency models; the default - is based on :class:`multiprocessing.Process`. + is based on :class:`threading.Thread`. kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. Generally, you should not need to set additional keyword arguments. """ - def __init__(self, batch_settings=(), batch_class=mp.Batch, **kwargs): + def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. kwargs['lib_name'] = 'gccl' @@ -66,23 +65,19 @@ def __init__(self, batch_settings=(), batch_class=mp.Batch, **kwargs): self._batch_class = batch_class self._batches = {} - @property - def concurrency(self): - """Return the concurrency strategy instance. + # Instantiate the "rejection batch", which is used for single-op + # acceptance checks if no batch is present. 
+ self._rejection = base.RejectionBatch() - Returns: - ~.pubsub_v1.concurrency.base.PublishStrategy: The class responsible - for handling publishing concurrency. - """ - return self._concurrency - - def batch(self, topic, create=True, autocommit=True): + def batch(self, topic, message, create=True, autocommit=True): """Return the current batch. This will create a new batch only if no batch currently exists. Args: topic (str): A string representing the topic. + message (~.pubsub_v1.types.PubsubMessage): The message that will + be committed. create (bool): Whether to create a new batch if no batch is found. Defaults to True. autocommit (bool): Whether to autocommit this batch. @@ -93,8 +88,7 @@ def batch(self, topic, create=True, autocommit=True): """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. - accepting = base.BaseBatch.Status.ACCEPTING_MESSAGES - if self._batches.get(topic, base.FAKE).status != accepting: + if not self._batches.get(topic, self._rejection).will_accept(message): if not create: return None self._batches[topic] = self._batch_class( @@ -161,5 +155,8 @@ def publish(self, topic, data, **attrs): raise TypeError('All attributes being published to Pub/Sub must ' 'be sent as text strings.') + # Create the Pub/Sub message object. + message = types.PubsubMessage(data=data, attributes=attrs) + # Delegate the publishing to the batch. 
- return self.batch(topic).publish(data, *attrs) + return self.batch(topic, message=message).publish(message) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index 410d6ea130fc..0a08bb9f4f2b 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -19,7 +19,7 @@ from google.cloud.gapic.pubsub.v1 import subscriber_client from google.cloud.pubsub_v1 import _gapic -from google.cloud.pubsub_v1.subscriber.policy import mp +from google.cloud.pubsub_v1.subscriber.policy import thread __VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -43,14 +43,14 @@ class SubscriberClient(object): :class:`.pubsub_v1.subscriber.policy.base.BasePolicy` class in order to define your own consumer. This is primarily provided to allow use of different concurrency models; the default - is based on :class:`multiprocessing.Process`. + is based on :class:`threading.Thread`. kwargs (dict): Any additional arguments provided are sent as keyword keyword arguments to the underlying :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. Generally, you should not need to set additional keyword arguments. """ - def __init__(self, flow_control=(), policy_class=mp.Policy, + def __init__(self, flow_control=(), policy_class=thread.Policy, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. 
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index 2026b30f3f2c..8add5aef23c4 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -227,6 +227,7 @@ def _blocking_consume(self): response_generator = self._policy.call_rpc(request_generator) try: for response in response_generator: + _LOGGER.debug('Received response: {0}'.format(response)) self._policy.on_response(response) # If the loop above exits without an exception, then the @@ -237,8 +238,8 @@ def _blocking_consume(self): break except KeyboardInterrupt: self.stop_consuming() - except Exception as e: - self._policy.on_exception(e) + except Exception as exc: + self._policy.on_exception(exc) def start_consuming(self): """Start consuming the stream.""" diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py index b5df134260e4..3f8b64ed9f73 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py @@ -148,7 +148,7 @@ def percentile(self, percent): for k in reversed(list(self._data.keys())): target -= self._data[k] if target <= 0: - return self._data[k] + return k # The only way to get here is if there was no data. # In this case, just return 10 seconds. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index 5690fd1c6d0d..e2bf9415f9a8 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -129,7 +129,7 @@ def ack(self): time_to_ack = math.ceil(time.time() - self._received_timestamp) self._policy.histogram.add(time_to_ack) self._policy.ack(self._ack_id) - self.release() + self.drop() def drop(self): """Release the message from lease management. 
@@ -181,4 +181,4 @@ def nack(self): This will cause the message to be re-delivered to the subscription. """ self.modify_ack_deadline(seconds=0) - self.release() + self.drop() diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index caa8c3580313..b740c0829865 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import abc +import logging import random import time @@ -26,13 +27,15 @@ from google.cloud.pubsub_v1.subscriber import consumer from google.cloud.pubsub_v1.subscriber import histogram +logger = logging.getLogger(__name__) + @six.add_metaclass(abc.ABCMeta) class BasePolicy(object): """Abstract class defining a subscription policy. - Although the :class:`~.pubsub_v1.subscriber.policy.mp.Policy` class, - based on :class:`multiprocessing.Process`, is fine for most cases, + Although the :class:`~.pubsub_v1.subscriber.policy.thread.Policy` class, + based on :class:`threading.Thread`, is fine for most cases, advanced users may need to implement something based on a different concurrency model. @@ -86,7 +89,7 @@ def ack_deadline(self): def initial_request(self): """Return the initial request. - This defines the intiial request that must always be sent to Pub/Sub + This defines the initial request that must always be sent to Pub/Sub immediately upon opening the subscription. """ return types.StreamingPullRequest( @@ -171,11 +174,13 @@ def maintain_leases(self): # This is based off of how long previous messages have taken to ack, # with a sensible default and within the ranges allowed by Pub/Sub. p99 = self.histogram.percentile(99) + logger.debug('The current p99 value is %d seconds.' % p99) # Create a streaming pull request. # We do not actually call `modify_ack_deadline` over and over because # it is more efficient to make a single request. 
ack_ids = list(self.managed_ack_ids) + logger.debug('Renewing lease for %d ack IDs.' % len(ack_ids)) if len(ack_ids) > 0: request = types.StreamingPullRequest( modify_deadline_ack_ids=ack_ids, @@ -189,7 +194,9 @@ def maintain_leases(self): # period between 0 seconds and 90% of the lease. This use of # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases # where there are many clients. - time.sleep(random.uniform(0.0, p99 * 0.9)) + snooze = random.uniform(0.0, p99 * 0.9) + logger.debug('Snoozing lease management for %f seconds.' % snooze) + time.sleep(snooze) self.maintain_leases() def modify_ack_deadline(self, ack_id, seconds): diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py similarity index 75% rename from pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py rename to pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 76129707cdd1..329cba9e3d86 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/mp.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -15,9 +15,9 @@ from __future__ import absolute_import from concurrent import futures -from multiprocessing import managers +import queue import logging -import multiprocessing +import threading import grpc @@ -29,15 +29,8 @@ logger = logging.getLogger(__name__) -# Allow sets to be able to be run through the managers; ensure they are -# iterable and have add/remove. -managers.SyncManager.register('set', set, - exposed=('__contains__', '__iter__', 'add', 'remove'), -) - - class Policy(base.BasePolicy): - """A consumer class based on :class:``multiprocessing.Process``. + """A consumer class based on :class:``threading.Thread``. This consumer handles the connection to the Pub/Sub service and all of the concurrency needs. @@ -47,19 +40,15 @@ def __init__(self, client, subscription): self._callback = lambda message: None # Create a manager for keeping track of shared state. 
- self._manager = multiprocessing.Manager() - self._shared = self._manager.Namespace(subscription=subscription) - self._managed_ack_ids = self._manager.set() - self._request_queue = self._manager.Queue() + self._managed_ack_ids = set() + self._request_queue = queue.Queue() # Call the superclass constructor. - super(Policy, self).__init__(client, subscription, - histogram_data=self._manager.dict(), - ) + super(Policy, self).__init__(client, subscription) # Also maintain a request queue and an executor. logger.debug('Creating callback requests thread (not starting).') - self._executor = futures.ProcessPoolExecutor() + self._executor = futures.ThreadPoolExecutor() self._callback_requests = helper_threads.QueueCallbackThread( self._request_queue, self.on_callback_request, @@ -67,33 +56,16 @@ def __init__(self, client, subscription): # Spawn a process that maintains all of the leases for this policy. logger.debug('Spawning lease process.') - self._lease_process = multiprocessing.Process( + self._lease_process = threading.Thread( target=self.maintain_leases, ) self._lease_process.daemon = True self._lease_process.start() - @property - def managed_ack_ids(self): - """Return the ack IDs currently being managed by the policy. - - Returns: - set: The set of ack IDs being managed. - """ - return self._managed_ack_ids - - @property - def subscription(self): - """Return the subscription. - - Returns: - str: The subscription - """ - return self._shared.subscription - def close(self): """Close the existing connection.""" self._consumer.helper_threads.stop('callback requests worker') + self._consumer.stop_consuming() def open(self, callback): """Open a streaming pull connection and begin receiving messages. @@ -138,5 +110,6 @@ def on_response(self, response): For each message, schedule a callback with the executor. 
""" for msg in response.received_messages: + logger.debug('New message received from Pub/Sub: %r', msg) message = Message(self, msg.ack_id, msg.message) self._executor.submit(self._callback, message) From 1df0ccfe51dc55d2e3ec7a367c4903f3e5313dd9 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 13 Jul 2017 12:33:54 -0700 Subject: [PATCH 38/86] WIP --- .../cloud/pubsub_v1/publisher/batch/thread.py | 3 ++- .../cloud/pubsub_v1/publisher/client.py | 4 ++-- .../cloud/pubsub_v1/subscriber/consumer.py | 9 ++++++++- .../cloud/pubsub_v1/subscriber/policy/base.py | 4 ++++ .../pubsub_v1/subscriber/policy/thread.py | 19 +++++++++++-------- 5 files changed, 27 insertions(+), 12 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 95418dc9cada..2744145a848b 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -230,7 +230,8 @@ def publish(self, message): :class:`concurrent.futures.Future` interface. """ # Coerce the type, just in case. - message = types.PubsubMessage(message) + if not isinstance(message, types.PubsubMessage): + message = types.PubsubMessage(message) # Add the size to the running total of the size, so we know # if future messages need to be rejected. diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 5ebbceec84e6..7aebdb1c4f9f 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -147,9 +147,9 @@ def publish(self, topic, data, **attrs): # Coerce all attributes to text strings. 
for k, v in copy.copy(attrs).items(): - if isinstance(data, six.text_type): + if isinstance(v, six.text_type): continue - if isinstance(data, six.binary_type): + if isinstance(v, six.binary_type): attrs[k] = v.decode('utf-8') continue raise TypeError('All attributes being published to Pub/Sub must ' diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index 8add5aef23c4..5d679dc0099c 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -178,6 +178,7 @@ def __init__(self, policy): self._request_queue = queue.Queue() self._exiting = threading.Event() + self.active = False self.helper_threads = helper_threads.HelperThreadRegistry() """:cls:`_helper_threads.HelperThreads`: manages the helper threads. The policy may use this to schedule its own helper threads. @@ -239,10 +240,15 @@ def _blocking_consume(self): except KeyboardInterrupt: self.stop_consuming() except Exception as exc: - self._policy.on_exception(exc) + try: + self._policy.on_exception(exc) + except: + self.active = False + raise def start_consuming(self): """Start consuming the stream.""" + self.active = True self._exiting.clear() self.helper_threads.start('consume bidirectional stream', self._request_queue, @@ -251,5 +257,6 @@ def start_consuming(self): def stop_consuming(self): """Signal the stream to stop and block until it completes.""" + self.active = False self._exiting.set() self.helper_threads.stop_all() diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index b740c0829865..77f24f57f92d 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -170,6 +170,10 @@ def maintain_leases(self): implementing your own policy, you _should_ call this method in an appropriate form of subprocess. 
""" + # Sanity check: Should this infinitely-recursive loop quit? + if not self._consumer.active: + return + # Determine the appropriate duration for the lease. # This is based off of how long previous messages have taken to ack, # with a sensible default and within the ranges allowed by Pub/Sub. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 329cba9e3d86..acb318006625 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -54,16 +54,9 @@ def __init__(self, client, subscription): self.on_callback_request, ) - # Spawn a process that maintains all of the leases for this policy. - logger.debug('Spawning lease process.') - self._lease_process = threading.Thread( - target=self.maintain_leases, - ) - self._lease_process.daemon = True - self._lease_process.start() - def close(self): """Close the existing connection.""" + # Close the main subscription connection. self._consumer.helper_threads.stop('callback requests worker') self._consumer.stop_consuming() @@ -77,14 +70,24 @@ def open(self, callback): Args: callback (Callable): The callback function. """ + # Start the thread to pass the requests. logger.debug('Starting callback requests worker.') self._callback = callback self._consumer.helper_threads.start('callback requests worker', self._request_queue, self._callback_requests, ) + + # Actually start consuming messages. self._consumer.start_consuming() + # Spawn a helper thread that maintains all of the leases for + # this policy. 
+ logger.debug('Spawning lease maintenance worker.') + self._leaser = threading.Thread(target=self.maintain_leases) + self._leaser.daemon = True + self._leaser.start() + def on_callback_request(self, callback_request): """Map the callback request to the appropriate GRPC request.""" action, args = callback_request[0], callback_request[1:] From acb4534dda76fe367d080cbfe934a642974246f4 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 13 Jul 2017 13:25:56 -0700 Subject: [PATCH 39/86] WIP --- pubsub/google/cloud/pubsub_v1/subscriber/consumer.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index 5d679dc0099c..a3f237d7fe63 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -200,6 +200,9 @@ def _request_generator_thread(self): """ # First, yield the initial request. This occurs on every new # connection, fundamentally including a resumed connection. + _LOGGER.debug('Sending initial request: {initial_request}'.format( + initial_request=self._policy.initial_request, + )) yield self._policy.initial_request # Now yield each of the items on the request queue, and block if there From 2fb27855748d21f58e94501deb2f5341b2bc8db4 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 13 Jul 2017 14:34:22 -0700 Subject: [PATCH 40/86] Update subscriber client config to be sane. 
--- .../gapic/pubsub/v1/subscriber_client_config.json | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json index 4b31158fbac8..6180cc0a941f 100644 --- a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json @@ -35,6 +35,15 @@ "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 12000, "total_timeout_millis": 600000 + }, + "streaming": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 900000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 900000, + "total_timeout_millis": 900000 } }, "methods": { @@ -79,9 +88,9 @@ "retry_params_name": "messaging" }, "StreamingPull": { - "timeout_millis": 60000, + "timeout_millis": 900000, "retry_codes_name": "pull", - "retry_params_name": "messaging" + "retry_params_name": "streaming" }, "ModifyPushConfig": { "timeout_millis": 60000, From ef178e9de342cd839d07ebf0b1bc8edaa89335ff Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 18 Jul 2017 13:59:02 -0700 Subject: [PATCH 41/86] Start adding unit tests. 
--- pubsub/google/cloud/pubsub_v1/__init__.py | 4 +- .../cloud/pubsub_v1/publisher/__init__.py | 6 +- .../cloud/pubsub_v1/publisher/batch/thread.py | 2 - .../cloud/pubsub_v1/publisher/client.py | 8 +- .../cloud/pubsub_v1/subscriber/__init__.py | 4 +- .../cloud/pubsub_v1/subscriber/client.py | 2 +- pubsub/google/cloud/pubsub_v1/types.py | 2 +- pubsub/tests/unit/__init__.py | 0 pubsub/tests/unit/pubsub_v1/__init__.py | 0 .../unit/pubsub_v1/publisher/test_client.py | 123 ++++++++++++++++++ 10 files changed, 137 insertions(+), 14 deletions(-) create mode 100644 pubsub/tests/unit/__init__.py create mode 100644 pubsub/tests/unit/pubsub_v1/__init__.py create mode 100644 pubsub/tests/unit/pubsub_v1/publisher/test_client.py diff --git a/pubsub/google/cloud/pubsub_v1/__init__.py b/pubsub/google/cloud/pubsub_v1/__init__.py index 7e785dc9dc7a..21706f6eee5e 100644 --- a/pubsub/google/cloud/pubsub_v1/__init__.py +++ b/pubsub/google/cloud/pubsub_v1/__init__.py @@ -15,8 +15,8 @@ from __future__ import absolute_import from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.publisher import PublisherClient -from google.cloud.pubsub_v1.subscriber import SubscriberClient +from google.cloud.pubsub_v1.publisher import Client as PublisherClient +from google.cloud.pubsub_v1.subscriber import Client as SubscriberClient __all__ = ( 'PublisherClient', diff --git a/pubsub/google/cloud/pubsub_v1/publisher/__init__.py b/pubsub/google/cloud/pubsub_v1/publisher/__init__.py index 60496983b352..76d54649448f 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/__init__.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/__init__.py @@ -12,9 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from google.cloud.pubsub_v1.publisher.client import PublisherClient +from __future__ import absolute_import + +from google.cloud.pubsub_v1.publisher.client import Client __all__ = ( - 'PublisherClient', + 'Client', ) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 2744145a848b..b963ec1b8370 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -173,8 +173,6 @@ def _commit(self): self.messages, ) - # FIXME (lukesneeringer): Check for failures; retry. - # We got a response from Pub/Sub; denote that we are processing. self._status = 'processing results' diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 7aebdb1c4f9f..c5b56063a8a2 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -31,7 +31,7 @@ @_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) -class PublisherClient(object): +class Client(object): """A publisher client for Google Cloud Pub/Sub. This creates an object that is capable of publishing messages. @@ -70,7 +70,7 @@ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): self._rejection = base.RejectionBatch() def batch(self, topic, message, create=True, autocommit=True): - """Return the current batch. + """Return the current batch for the provided topic. This will create a new batch only if no batch currently exists. @@ -84,7 +84,7 @@ def batch(self, topic, message, create=True, autocommit=True): This is primarily useful for debugging. Returns: - :class:~`pubsub_v1.batch.Batch` The batch object. + ~.pubsub_v1.batch.Batch: The batch object. """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. 
@@ -136,7 +136,7 @@ def publish(self, topic, data, **attrs): sent as metadata. (These may be text strings or byte strings.) Returns: - :class:`~.pubsub_v1.publisher.futures.Future`: An object conforming + ~.pubsub_v1.publisher.futures.Future: An object conforming to the ``concurrent.futures.Future`` interface. """ # Sanity check: Is the data being sent as a bytestring? diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py index ee2aaca57ef0..d98a7bb75be4 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py @@ -14,9 +14,9 @@ from __future__ import absolute_import -from google.cloud.pubsub_v1.subscriber.client import SubscriberClient +from google.cloud.pubsub_v1.subscriber.client import Client __all__ = ( - 'SubscriberClient', + 'Client', ) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index 0a08bb9f4f2b..58fa66881ba7 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -27,7 +27,7 @@ @_gapic.add_methods(subscriber_client.SubscriberClient, blacklist=('pull', 'streaming_pull')) -class SubscriberClient(object): +class Client(object): """A subscriber client for Google Cloud Pub/Sub. This creates an object that is capable of subscribing to messages. 
diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py index 778391f1c21f..f7ab43d1ea4e 100644 --- a/pubsub/google/cloud/pubsub_v1/types.py +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -33,7 +33,7 @@ ) BatchSettings.__new__.__defaults__ = ( 1024 * 1024 * 5, # max_bytes: 5 MB - 0.25, # max_latency: 0.25 seconds + 1.0, # max_latency: 1.0 seconds 1000, # max_messages: 1,000 ) diff --git a/pubsub/tests/unit/__init__.py b/pubsub/tests/unit/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pubsub/tests/unit/pubsub_v1/__init__.py b/pubsub/tests/unit/pubsub_v1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_client.py new file mode 100644 index 000000000000..3f9374a20fd6 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_client.py @@ -0,0 +1,123 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +import pytest + +from google.cloud.gapic.pubsub.v1 import publisher_client + +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types + + +def test_init(): + client = publisher.Client() + + # A plain client should have an `api` (the underlying GAPIC) and a + # batch settings object, which should have the defaults. 
+ assert isinstance(client.api, publisher_client.PublisherClient) + assert client.batch_settings.max_bytes == 5 * (2 ** 20) + assert client.batch_settings.max_latency == 1.0 + assert client.batch_settings.max_messages == 1000 + + +def test_batch_accepting(): + """Establish that an existing batch is returned if it accepts messages.""" + client = publisher.Client() + message = types.PubsubMessage(data=b'foo') + + # At first, there are no batches, so this should return a new batch + # which is also saved to the object. + ante = len(client._batches) + batch = client.batch('topic_name', message, autocommit=False) + assert len(client._batches) == ante + 1 + assert batch is client._batches['topic_name'] + + # A subsequent request should return the same batch. + batch2 = client.batch('topic_name', message, autocommit=False) + assert batch is batch2 + assert batch2 is client._batches['topic_name'] + + +def test_batch_without_autocreate(): + client = publisher.Client() + message = types.PubsubMessage(data=b'foo') + + # If `create=False` is sent, then when the batch is not found, None + # is returned instead. + ante = len(client._batches) + batch = client.batch('topic_name', message, create=False) + assert batch is None + assert len(client._batches) == ante + + +def test_publish(): + client = publisher.Client() + + # Use a mock in lieu of the actual batch class; set the mock up to claim + # indiscriminately that it accepts all messages. + batch = mock.Mock(spec=client._batch_class) + batch.will_accept.return_value = True + client._batches['topic_name'] = batch + + # Begin publishing. + client.publish('topic_name', b'spam') + client.publish('topic_name', b'foo', bar='baz') + + # The batch's publish method should have been called twice. + assert batch.publish.call_count == 2 + + # In both cases + # The first call should correspond to the first message. 
+ _, args, _ = batch.publish.mock_calls[0] + assert args[0].data == b'spam' + assert not args[0].attributes + + # The second call should correspond to the second message. + _, args, _ = batch.publish.mock_calls[1] + assert args[0].data == b'foo' + assert args[0].attributes == {u'bar': u'baz'} + + +def test_publish_data_not_bytestring_error(): + client = publisher.Client() + with pytest.raises(TypeError): + client.publish(u'This is a text string.') + with pytest.raises(TypeError): + client.publish(42) + + +def test_publish_attrs_bytestring(): + client = publisher.Client() + + # Use a mock in lieu of the actual batch class; set the mock up to claim + # indiscriminately that it accepts all messages. + batch = mock.Mock(spec=client._batch_class) + batch.will_accept.return_value = True + client._batches['topic_name'] = batch + + # Begin publishing. + client.publish('topic_name', b'foo', bar=b'baz') + + # The attributes should have been sent as text. + _, args, _ = batch.publish.mock_calls[0] + assert args[0].data == b'foo' + assert args[0].attributes == {u'bar': u'baz'} + + +def test_publish_attrs_type_error(): + client = publisher.Client() + with pytest.raises(TypeError): + client.publish(b'foo', answer=42) From 147ad18d9c9318dbf71458460dc46db79a464e2c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 19 Jul 2017 12:13:14 -0700 Subject: [PATCH 42/86] Beginning work on unit tests. 
--- .../cloud/pubsub_v1/publisher/batch/thread.py | 45 ++-- .../pubsub_v1/publisher/batch/test_thread.py | 197 ++++++++++++++++++ .../publisher/batch/test_thread_future.py | 112 ++++++++++ .../unit/pubsub_v1/publisher/test_client.py | 6 +- 4 files changed, 339 insertions(+), 21 deletions(-) create mode 100644 pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py create mode 100644 pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index b963ec1b8370..37664ef5ffec 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -60,16 +60,15 @@ class Batch(base.BaseBatch): def __init__(self, client, topic, settings, autocommit=True): self._client = client - # Create a namespace that is owned by the client manager; this - # is necessary to be able to have these values be communicable between - # processes. + # These objects are all communicated between threads; ensure that + # any writes to them are atomic. self._futures = [] self._messages = [] self._size = 0 - self._message_ids = {} self._settings = settings self._status = self.Status.ACCEPTING_MESSAGES self._topic = topic + self.message_ids = {} # This is purely internal tracking. self._thread = None @@ -167,8 +166,8 @@ def _commit(self): # Begin the request to publish these messages. if len(self._messages) == 0: - raise Exception('Empty queue') - response = self._client.api.publish( + return + response = self.client.api.publish( self._topic, self.messages, ) @@ -188,7 +187,7 @@ def _commit(self): # if not. 
self._status = self.Status.SUCCESS for message_id, fut in zip(response.message_ids, self._futures): - self._message_ids[hash(fut)] = message_id + self.message_ids[hash(fut)] = message_id fut._trigger() def monitor(self): @@ -229,7 +228,7 @@ def publish(self, message): """ # Coerce the type, just in case. if not isinstance(message, types.PubsubMessage): - message = types.PubsubMessage(message) + message = types.PubsubMessage(**message) # Add the size to the running total of the size, so we know # if future messages need to be rejected. @@ -255,7 +254,7 @@ class Future(object): methods in this library. Args: - batch (:class:`~.Batch`): The batch object that is committing + batch (`~.Batch`): The batch object that is committing this message. """ def __init__(self, batch): @@ -290,10 +289,13 @@ def running(self): def done(self): """Return True if the publish has completed, False otherwise. - This still returns True in failure cases; checking `result` or - `exception` is the canonical way to assess success or failure. + This still returns True in failure cases; checking :meth:`result` or + :meth:`exception` is the canonical way to assess success or failure. """ - return self._batch.status in ('success', 'error') + return self._batch.status in ( + self._batch.Status.SUCCESS, + self._batch.Status.ERROR, + ) def result(self, timeout=None): """Return the message ID, or raise an exception. @@ -305,9 +307,12 @@ def result(self, timeout=None): timeout (int|float): The number of seconds before this call times out and raises TimeoutError. + Returns: + str: The message ID. + Raises: - :class:~`pubsub_v1.TimeoutError`: If the request times out. - :class:~`Exception`: For undefined exceptions in the underlying + ~.pubsub_v1.TimeoutError: If the request times out. + Exception: For undefined exceptions in the underlying call execution. """ # Attempt to get the exception if there is one. 
@@ -329,11 +334,15 @@ def exception(self, timeout=None, _wait=1): times out and raises TimeoutError. Raises: - :exc:`TimeoutError`: If the request times out. + TimeoutError: If the request times out. Returns: - :class:`Exception`: The exception raised by the call, if any. + Exception: The exception raised by the call, if any. """ + # If no timeout was specified, use inf. + if timeout is None: + timeout = float('inf') + # If the batch completed successfully, this should return None. if self._batch.status == 'success': return None @@ -343,14 +352,14 @@ def exception(self, timeout=None, _wait=1): return self._batch.error # If the timeout has been exceeded, raise TimeoutError. - if timeout and timeout < 0: + if timeout <= 0: raise exceptions.TimeoutError('Timed out waiting for exception.') # Wait a little while and try again. time.sleep(_wait) return self.exception( timeout=timeout - _wait, - _wait=min(_wait * 2, 60), + _wait=min(_wait * 2, timeout, 60), ) def add_done_callback(self, fn): diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py new file mode 100644 index 000000000000..bbbc9890a8b1 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -0,0 +1,197 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading +import time + +import mock + +import pytest + +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.batch.thread import Batch + + +def create_batch(autocommit=False, **batch_settings): + """Return a batch object suitable for testing. + + Args: + autocommit (bool): Whether the batch should commit after + ``max_latency`` seconds. By default, this is ``False`` + for unit testing. + kwargs (dict): Arguments passed on to the + :class:``~.pubsub_v1.types.BatchSettings`` constructor. + + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: A batch object. + """ + client = publisher.Client() + settings = types.BatchSettings(**batch_settings) + return Batch(client, 'topic_name', settings, autocommit=autocommit) + + +def test_init(): + """Establish that a monitor thread is usually created on init.""" + client = publisher.Client() + + # Do not actually create a thread, but do verify that one was created; + # it should be running the batch's "monitor" method (which commits the + # batch once time elapses). + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch = Batch(client, 'topic_name', types.BatchSettings()) + Thread.assert_called_once_with(target=batch.monitor) + + # New batches start able to accept messages by default. + assert batch.status == batch.Status.ACCEPTING_MESSAGES + + +def test_init_infinite_latency(): + batch = create_batch(max_latency=float('inf')) + assert batch._thread is None + + +def test_client(): + client = publisher.Client() + settings = types.BatchSettings() + batch = Batch(client, 'topic_name', settings, autocommit=False) + assert batch.client is client + + +def test_commit(): + batch = create_batch() + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch.commit() + + # A thread should have been created to do the actual commit. 
+ Thread.assert_called_once_with(target=batch._commit) + Thread.return_value.start.assert_called_once_with() + + # The batch's status needs to be something other than "accepting messages", + # since the commit started. + assert batch.status != batch.Status.ACCEPTING_MESSAGES + + +def test_blocking_commit(): + batch = create_batch() + futures = ( + batch.publish({'data': b'This is my message.'}), + batch.publish({'data': b'This is another message.'}), + ) + + # Set up the underlying API publish method to return a PublishResponse. + with mock.patch.object(type(batch.client.api), 'publish') as publish: + publish.return_value = types.PublishResponse(message_ids=['a', 'b']) + + # Actually commit the batch. + batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. + publish.assert_called_once_with('topic_name', [ + types.PubsubMessage(data=b'This is my message.'), + types.PubsubMessage(data=b'This is another message.'), + ]) + + # Establish that all of the futures are done, and that they have the + # expected values. + assert all([f.done() for f in futures]) + assert futures[0].result() == 'a' + assert futures[1].result() == 'b' + + +def test_blocking_commit_no_messages(): + batch = create_batch() + with mock.patch.object(type(batch.client.api), 'publish') as publish: + batch._commit() + assert publish.call_count == 0 + + +def test_blocking_commit_wrong_messageid_length(): + batch = create_batch() + batch.publish({'data': b'blah blah blah'}) + batch.publish({'data': b'blah blah blah blah'}) + + # Set up a PublishResponse that only returns one message ID. 
+ with mock.patch.object(type(batch.client.api), 'publish') as publish: + publish.return_value = types.PublishResponse(message_ids=['a']) + with pytest.raises(exceptions.PublishError): + batch._commit() + + +def test_monitor(): + batch = create_batch(max_latency=5.0) + with mock.patch.object(time, 'sleep') as sleep: + with mock.patch.object(type(batch), '_commit') as _commit: + batch.monitor() + + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) + + # Since `monitor` runs in its own thread, it should call + # the blocking commit implementation. + _commit.assert_called_once_with() + + +def test_monitor_already_committed(): + batch = create_batch(max_latency=5.0) + batch._status = 'something else' + with mock.patch.object(time, 'sleep') as sleep: + with mock.patch.object(type(batch), '_commit') as _commit: + batch.monitor() + + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) + + # Since the batch was no longer accepting messages, the + # commit function should *not* have been called. + assert _commit.call_count == 0 + + +def test_publish(): + batch = create_batch() + messages = ( + types.PubsubMessage(data=b'foobarbaz'), + types.PubsubMessage(data=b'spameggs'), + types.PubsubMessage(data=b'1335020400'), + ) + + # Publish each of the messages, which should save them to the batch. + for message in messages: + batch.publish(message) + + # There should be three messages on the batch, and three futures. + assert len(batch.messages) == 3 + assert len(batch._futures) == 3 + + # The size should have been incremented by the sum of the size of the + # messages. + assert batch.size == sum([m.ByteSize() for m in messages]) + assert batch.size > 0 # I do not always trust protobuf. + + +def test_publish_dict(): + batch = create_batch() + batch.publish({'data': b'foobarbaz', 'attributes': {'spam': 'eggs'}}) + + # There should be one message on the batch. 
+ assert len(batch.messages) == 1 + + # It should be an actual protobuf Message at this point, with the + # expected values. + message = batch.messages[0] + assert isinstance(message, types.PubsubMessage) + assert message.data == b'foobarbaz' + assert message.attributes == {'spam': 'eggs'} diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py new file mode 100644 index 000000000000..2b2cb26e1303 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py @@ -0,0 +1,112 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +import mock + +import pytest + +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.batch.thread import Batch +from google.cloud.pubsub_v1.publisher.batch.thread import Future + + +def create_batch(status=None): + """Create a batch object, which does not commit. + + Args: + status (str): If provided, the batch's internal status will be set + to the provided status. 
+ + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: The batch object + """ + client = publisher.Client() + batch_settings = types.BatchSettings() + batch = Batch(client, 'topic_name', batch_settings, autocommit=False) + if status: + batch._status = status + return batch + + +def create_future(batch=None): + """Create a Future object to test. + + Args: + ~.pubsub_v1.publisher.batch.thread.Batch: A batch object, such + as one returned from :meth:`create_batch`. If none is provided, + a batch will be automatically created. + + Returns: + ~.pubsub_v1.publisher.batch.thread.Future: The Future object (the + class being tested in this module). + """ + if batch is None: + batch = create_batch() + return Future(batch=batch) + + +def test_cancel(): + assert create_future().cancel() is False + + +def test_cancelled(): + assert create_future().cancelled() is False + + +def test_running(): + assert create_future().running() is True + + +def test_done(): + batch = create_batch() + future = create_future(batch=batch) + assert future.done() is False + batch._status = batch.Status.SUCCESS + assert future._batch.status == 'success' + assert future.done() is True + + +def test_exception_no_error(): + batch = create_batch(status='success') + future = create_future(batch=batch) + assert future.exception() is None + + +def test_exception_with_error(): + batch = create_batch(status='error') + batch.error = RuntimeError('Something really bad happened.') + future = create_future(batch=batch) + + # Make sure that the exception that is returned is the batch's error. + # Also check the type to ensure the batch's error did not somehow + # change internally. 
+ assert future.exception() is batch.error + assert isinstance(future.exception(), RuntimeError) + + +def test_exception_timeout(): + future = create_future() + with mock.patch.object(time, 'sleep') as sleep: + with pytest.raises(exceptions.TimeoutError): + future.exception(timeout=10) + + # The sleep should have been called with 1, 2, 4, then 3 seconds + # (the first three due to linear backoff, then the last one because + # only three seconds were left before the timeout was to be hit). + assert sleep.call_count == 4 + assert sleep.mock_calls[0] diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_client.py index 3f9374a20fd6..6d5f653a46f4 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/test_client.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_client.py @@ -94,9 +94,9 @@ def test_publish(): def test_publish_data_not_bytestring_error(): client = publisher.Client() with pytest.raises(TypeError): - client.publish(u'This is a text string.') + client.publish('topic_name', u'This is a text string.') with pytest.raises(TypeError): - client.publish(42) + client.publish('topic_name', 42) def test_publish_attrs_bytestring(): @@ -120,4 +120,4 @@ def test_publish_attrs_bytestring(): def test_publish_attrs_type_error(): client = publisher.Client() with pytest.raises(TypeError): - client.publish(b'foo', answer=42) + client.publish('topic_name', b'foo', answer=42) From 9a6b7cba8c7d7f1515709afc096851064e891dbd Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 20 Jul 2017 06:51:08 -0700 Subject: [PATCH 43/86] Update publisher to be thread-based. 
--- .../cloud/pubsub_v1/publisher/__init__.py | 6 +- .../cloud/pubsub_v1/publisher/batch/base.py | 121 ++++++--- .../publisher/batch/{mp.py => thread.py} | 237 +++++++++--------- .../cloud/pubsub_v1/publisher/client.py | 44 ++-- .../cloud/pubsub_v1/publisher/exceptions.py | 4 + 5 files changed, 222 insertions(+), 190 deletions(-) rename pubsub/google/cloud/pubsub_v1/publisher/batch/{mp.py => thread.py} (59%) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/__init__.py b/pubsub/google/cloud/pubsub_v1/publisher/__init__.py index 60496983b352..76d54649448f 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/__init__.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/__init__.py @@ -12,9 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.cloud.pubsub_v1.publisher.client import PublisherClient +from __future__ import absolute_import + +from google.cloud.pubsub_v1.publisher.client import Client __all__ = ( - 'PublisherClient', + 'Client', ) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index 54c959f12eb2..263c18e56a80 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -24,8 +24,8 @@ class BaseBatch(object): """The base batching class for Pub/Sub publishing. - Although the :class:`~.pubsub_v1.publisher.batch.mp.Batch` class, based - on :class:`multiprocessing.Process`, is fine for most cases, advanced + Although the :class:`~.pubsub_v1.publisher.batch.thread.Batch` class, based + on :class:`threading.Thread`, is fine for most cases, advanced users may need to implement something based on a different concurrency model. @@ -33,6 +33,20 @@ class BaseBatch(object): subclasses may be passed as the ``batch_class`` argument to :class:`~.pubsub_v1.client.PublisherClient`. 
""" + def __len__(self): + """Return the number of messages currently in the batch.""" + return len(self.messages) + + @property + @abc.abstractmethod + def client(self): + """Return the client used to create this batch. + + Returns: + ~.pubsub_v1.client.PublisherClient: A publisher client. + """ + raise NotImplementedError + @property @abc.abstractmethod def client(self): @@ -43,15 +57,35 @@ def client(self): """ raise NotImplementedError + @property + @abc.abstractmethod + def messages(self): + """Return the messages currently in the batch. + + Returns: + Sequence: The messages currently in the batch. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def size(self): + """Return the total size of all of the messages currently in the batch. + + Returns: + int: The total size of all of the messages currently + in the batch, in bytes. + """ + raise NotImplementedError + @property @abc.abstractmethod def settings(self): - """Return the settings for this batch. + """Return the batch settings. Returns: - ~.pubsub_v1.types.Batching: The settings for batch - publishing. These should be considered immutable once the batch - has been opened. + ~.pubsub_v1.types.BatchSettings: The batch settings. These are + considered immutable once the batch has been opened. """ raise NotImplementedError @@ -62,52 +96,47 @@ def status(self): Returns: str: The status of this batch. All statuses are human-readable, - all-lowercase strings, and represented in the - :class:`BaseBatch.Status` enum. + all-lowercase strings. The ones represented in the + :class:`BaseBatch.Status` enum are special, but other statuses + are permitted. """ raise NotImplementedError - @abc.abstractmethod - def commit(self): - """Asychronously commit everything in this batch. + def will_accept(self, message): + """Return True if the batch is able to accept the message. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. 
- Subclasses must define this as an asychronous method; it may be called - from the primary process by :meth:`check_limits`. + Returns: + bool: Whether this batch can accept the message. """ - raise NotImplementedError + # If this batch is not accepting messages generally, return False. + if self.status != self.Status.ACCEPTING_MESSAGES: + return False - @abc.abstractmethod - def publish(self, data, **attrs): - """Publish a single message. + # If this batch can not hold the message in question, return False. + if self.size + message.ByteSize() > self.settings.max_bytes: + return False - .. note:: - Messages in Pub/Sub are blobs of bytes. They are *binary* data, - not text. You must send data as a bytestring - (``bytes`` in Python 3; ``str`` in Python 2), and this library - will raise an exception if you send a text string. + # Okay, everything is good. + return True - The reason that this is so important (and why we do not try to - coerce for you) is because Pub/Sub is also platform independent - and there is no way to know how to decode messages properly on - the other side; therefore, encoding and decoding is a required - exercise for the developer. + @abc.abstractmethod + def publish(self, message): + """Publish a single message. Add the given message to this object; this will cause it to be published once the batch either has enough messages or a sufficient period of time has elapsed. - Args: - data (bytes): A bytestring representing the message body. This - must be a bytestring (a text string will raise TypeError). - attrs (Mapping[str, str]): A dictionary of attributes to be - sent as metadata. (These may be text strings or byte strings.) + This method is called by :meth:`~.PublisherClient.publish`. - Raises: - TypeError: If the ``data`` sent is not a bytestring, or if the - ``attrs`` are not either a ``str`` or ``bytes``. + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. 
Returns: - ~.pubsub_v1.publisher.future.Future: An object conforming to the + ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the :class:`concurrent.futures.Future` interface. """ raise NotImplementedError @@ -124,7 +153,19 @@ class Status(object): SUCCESS = 'success' -# Make a fake batch. This is used by the client to do single-op checks -# for batch existence. -FakeBatch = collections.namedtuple('FakeBatch', ['status']) -FAKE = FakeBatch(status='fake') +class RejectionBatch(object): + """A fake batch-like object that refuses to accept any message. + + This is used by the client to do single-op checks for batch + existence. + """ + def will_accept(self, message): + """Return False. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + bool: Whether this batch can accept the message. It never can. + """ + return False diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py similarity index 59% rename from pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py rename to pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 52962ae264bf..37664ef5ffec 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/mp.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -14,14 +14,10 @@ from __future__ import absolute_import -import copy -import multiprocessing -import queue +import threading import time import uuid -import six - from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.batch import base @@ -54,7 +50,7 @@ class Batch(base.BaseBatch): create this batch. topic (str): The topic. The format for this is ``projects/{project}/topics/{topic}``. - settings (~.pubsub_v1.types.Batching): The settings for batch + settings (~.pubsub_v1.types.BatchSettings): The settings for batch publishing. 
These should be considered immutable once the batch has been opened. autocommit (bool): Whether to autocommit the batch when the time @@ -63,28 +59,25 @@ class Batch(base.BaseBatch): """ def __init__(self, client, topic, settings, autocommit=True): self._client = client - self._manager = multiprocessing.Manager() - - # Create a namespace that is owned by the client manager; this - # is necessary to be able to have these values be communicable between - # processes. - self._shared = self.manager.Namespace() - self._shared.futures = self.manager.list() - self._shared.messages = self.manager.list() - self._shared.message_ids = self.manager.dict() - self._shared.settings = settings - self._shared.status = self.Status.ACCEPTING_MESSAGES - self._shared.topic = topic - # This is purely internal tracking. - self._process = None + # These objects are all communicated between threads; ensure that + # any writes to them are atomic. + self._futures = [] + self._messages = [] self._size = 0 + self._settings = settings + self._status = self.Status.ACCEPTING_MESSAGES + self._topic = topic + self.message_ids = {} + + # This is purely internal tracking. + self._thread = None # Continually monitor the thread until it is time to commit the # batch, or the batch is explicitly committed. - if autocommit and self.settings.max_latency < float('inf'): - self._process = multiprocessing.Process(target=self.monitor) - self._process.start() + if autocommit and self._settings.max_latency < float('inf'): + self._thread = threading.Thread(target=self.monitor) + self._thread.start() @property def client(self): @@ -96,25 +89,33 @@ def client(self): return self._client @property - def manager(self): - """Return the client's manager. + def messages(self): + """Return the messages currently in the batch. Returns: - :class:`multiprocessing.Manager`: The manager responsible for - handling shared memory objects. + Sequence: The messages currently in the batch. 
""" - return self._manager + return self._messages @property def settings(self): - """Return the settings for this batch. + """Return the batch settings. + + Returns: + ~.pubsub_v1.types.BatchSettings: The batch settings. These are + considered immutable once the batch has been opened. + """ + return self._settings + + @property + def size(self): + """Return the total size of all of the messages currently in the batch. Returns: - ~.pubsub_v1.types.Batching: The settings for batch - publishing. These should be considered immutable once the batch - has been opened. + int: The total size of all of the messages currently + in the batch, in bytes. """ - return self._shared.settings + return self._size @property def status(self): @@ -124,15 +125,29 @@ def status(self): str: The status of this batch. All statuses are human-readable, all-lowercase strings. """ - return self._shared.status + return self._status def commit(self): - """Asychronously publish all of the messages on the active branch. + """Actually publish all of the messages on the active batch. + + This synchronously sets the batch status to in-flight, and then opens + a new thread, which handles actually sending the messages to Pub/Sub. + + .. note:: - This method may be safely called from the primary process. + This method is non-blocking. It opens a new thread, which calls + :meth:`_commit`, which does block. """ - process = multiprocessing.Process(self._commit) - process.start() + # Set the status to in-flight synchronously, to ensure that + # this batch will necessarily not accept new messages. + # + # Yes, this is repeated in `_commit`, because that method is called + # directly by `monitor`. + self._status = 'in-flight' + + # Start a new thread to actually handle the commit. + commit_thread = threading.Thread(target=self._commit) + commit_thread.start() def _commit(self): """Actually publish all of the messages on the active batch. 
@@ -140,26 +155,29 @@ def _commit(self): This moves the batch out from being the active batch to an in-flight batch on the publisher, and then the batch is discarded upon completion. + + .. note:: + + This method blocks. The :meth:`commit` method is the non-blocking + version, which calls this one. """ # Update the status. - self._shared.status = 'in-flight' + self._status = 'in-flight' # Begin the request to publish these messages. - if len(self._shared.messages) == 0: - raise Exception('Empty queue') - response = self._client.api.publish( - self._shared.topic, - self._shared.messages, + if len(self._messages) == 0: + return + response = self.client.api.publish( + self._topic, + self.messages, ) - # FIXME (lukesneeringer): Check for failures; retry. - # We got a response from Pub/Sub; denote that we are processing. self._status = 'processing results' # Sanity check: If the number of message IDs is not equal to the # number of futures I have, then something went wrong. - if len(response.message_ids) != len(self._shared.futures): + if len(response.message_ids) != len(self._futures): raise exceptions.PublishError( 'Some messages were not successfully published.', ) @@ -167,9 +185,9 @@ def _commit(self): # Iterate over the futures on the queue and return the response IDs. # We are trusting that there is a 1:1 mapping, and raise an exception # if not. - self._shared.status = self.Status.SUCCESS - for message_id, fut in zip(response.message_ids, self._shared.futures): - self._shared.message_ids[hash(fut)] = message_id + self._status = self.Status.SUCCESS + for message_id, fut in zip(response.message_ids, self._futures): + self.message_ids[hash(fut)] = message_id fut._trigger() def monitor(self): @@ -182,88 +200,47 @@ def monitor(self): # in a separate thread. # # Sleep for however long we should be waiting. 
- time.sleep(self.settings.max_latency) + time.sleep(self._settings.max_latency) # If, in the intervening period, the batch started to be committed, # then no-op at this point. - if self.status != self.Status.ACCEPTING_MESSAGES: + if self._status != self.Status.ACCEPTING_MESSAGES: return # Commit. return self._commit() - def publish(self, data, **attrs): + def publish(self, message): """Publish a single message. - .. note:: - Messages in Pub/Sub are blobs of bytes. They are *binary* data, - not text. You must send data as a bytestring - (``bytes`` in Python 3; ``str`` in Python 2), and this library - will raise an exception if you send a text string. - - The reason that this is so important (and why we do not try to - coerce for you) is because Pub/Sub is also platform independent - and there is no way to know how to decode messages properly on - the other side; therefore, encoding and decoding is a required - exercise for the developer. - Add the given message to this object; this will cause it to be published once the batch either has enough messages or a sufficient period of time has elapsed. - Args: - data (bytes): A bytestring representing the message body. This - must be a bytestring (a text string will raise TypeError). - attrs (Mapping[str, str]): A dictionary of attributes to be - sent as metadata. (These may be text strings or byte strings.) + This method is called by :meth:`~.PublisherClient.publish`. - Raises: - TypeError: If the ``data`` sent is not a bytestring, or if the - ``attrs`` are not either a ``str`` or ``bytes``. + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. Returns: ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the :class:`concurrent.futures.Future` interface. """ - # Sanity check: Is the data being sent as a bytestring? - # If it is literally anything else, complain loudly about it. 
- if not isinstance(data, six.binary_type): - raise TypeError('Data being published to Pub/Sub must be sent ' - 'as a bytestring.') - - # Coerce all attributes to text strings. - for k, v in copy.copy(attrs).items(): - if isinstance(data, six.text_type): - continue - if isinstance(data, six.binary_type): - attrs[k] = v.decode('utf-8') - continue - raise TypeError('All attributes being published to Pub/Sub must ' - 'be sent as text strings.') + # Coerce the type, just in case. + if not isinstance(message, types.PubsubMessage): + message = types.PubsubMessage(**message) - # Store the actual message in the batch's message queue. - self._shared.messages.append( - types.PubsubMessage(data=data, attributes=attrs), - ) + # Add the size to the running total of the size, so we know + # if future messages need to be rejected. + self._size += message.ByteSize() - # Add the size of the message to our size tracking. - self._size += len(data) - self._size += sum([len(k) + len(v) for k, v in attrs.items()]) + # Store the actual message in the batch's message queue. + self._messages.append(message) # Return a Future. That future needs to be aware of the status # of this batch. - f = Future(self._shared) - self._shared.futures.append(f) - - # Check and see if we have hit message limits. If we have, - # commit. - if len(self._shared.messages) >= self.settings.max_messages: - self._shared.status = 'at message cap' - self.commit() - if self._size >= self.settings.max_bytes: - self._shared.status = 'at size cap' - self.commit() - + f = Future(self) + self._futures.append(f) return f @@ -277,11 +254,11 @@ class Future(object): methods in this library. Args: - batch (:class:`multiprocessing.Namespace`): Information about the - batch object that is committing this message. + batch (`~.Batch`): The batch object that is committing + this message. 
""" - def __init__(self, batch_info): - self._batch_info = batch_info + def __init__(self, batch): + self._batch = batch self._callbacks = [] self._hash = hash(uuid.uuid4()) @@ -312,10 +289,13 @@ def running(self): def done(self): """Return True if the publish has completed, False otherwise. - This still returns True in failure cases; checking `result` or - `exception` is the canonical way to assess success or failure. + This still returns True in failure cases; checking :meth:`result` or + :meth:`exception` is the canonical way to assess success or failure. """ - return self._batch_info.status in ('success', 'error') + return self._batch.status in ( + self._batch.Status.SUCCESS, + self._batch.Status.ERROR, + ) def result(self, timeout=None): """Return the message ID, or raise an exception. @@ -327,9 +307,12 @@ def result(self, timeout=None): timeout (int|float): The number of seconds before this call times out and raises TimeoutError. + Returns: + str: The message ID. + Raises: - :class:~`pubsub_v1.TimeoutError`: If the request times out. - :class:~`Exception`: For undefined exceptions in the underlying + ~.pubsub_v1.TimeoutError: If the request times out. + Exception: For undefined exceptions in the underlying call execution. """ # Attempt to get the exception if there is one. @@ -337,7 +320,7 @@ def result(self, timeout=None): # return an appropriate value. err = self.exception(timeout=timeout) if err is None: - return self._batch_info.message_ids[hash(self)] + return self._batch.message_ids[hash(self)] raise err def exception(self, timeout=None, _wait=1): @@ -351,28 +334,32 @@ def exception(self, timeout=None, _wait=1): times out and raises TimeoutError. Raises: - :exc:`TimeoutError`: If the request times out. + TimeoutError: If the request times out. Returns: - :class:`Exception`: The exception raised by the call, if any. + Exception: The exception raised by the call, if any. """ + # If no timeout was specified, use inf. 
+ if timeout is None: + timeout = float('inf') + # If the batch completed successfully, this should return None. - if self._batch_info.status == 'success': + if self._batch.status == 'success': return None # If this batch had an error, this should return it. - if self._batch_info.status == 'error': - return self._batch_info.error + if self._batch.status == 'error': + return self._batch.error # If the timeout has been exceeded, raise TimeoutError. - if timeout and timeout < 0: - raise TimeoutError('Timed out waiting for an exception.') + if timeout <= 0: + raise exceptions.TimeoutError('Timed out waiting for exception.') # Wait a little while and try again. time.sleep(_wait) return self.exception( timeout=timeout - _wait, - _wait=min(_wait * 2, 60), + _wait=min(_wait * 2, timeout, 60), ) def add_done_callback(self, fn): diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 0e815395d74c..c5b56063a8a2 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -14,7 +14,7 @@ from __future__ import absolute_import -import functools +import copy import pkg_resources import six @@ -24,14 +24,14 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher.batch import base -from google.cloud.pubsub_v1.publisher.batch import mp +from google.cloud.pubsub_v1.publisher.batch import thread __VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version @_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) -class PublisherClient(object): +class Client(object): """A publisher client for Google Cloud Pub/Sub. This creates an object that is capable of publishing messages. @@ -46,13 +46,13 @@ class PublisherClient(object): :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in order to define your own batcher. 
This is primarily provided to allow use of different concurrency models; the default - is based on :class:`multiprocessing.Process`. + is based on :class:`threading.Thread`. kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. Generally, you should not need to set additional keyword arguments. """ - def __init__(self, batch_settings=(), batch_class=mp.Batch, **kwargs): + def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. kwargs['lib_name'] = 'gccl' @@ -65,35 +65,30 @@ def __init__(self, batch_settings=(), batch_class=mp.Batch, **kwargs): self._batch_class = batch_class self._batches = {} - @property - def concurrency(self): - """Return the concurrency strategy instance. + # Instantiate the "rejection batch", which is used for single-op + # acceptance checks if no batch is present. + self._rejection = base.RejectionBatch() - Returns: - ~.pubsub_v1.concurrency.base.PublishStrategy: The class responsible - for handling publishing concurrency. - """ - return self._concurrency - - def batch(self, topic, create=True, autocommit=True): - """Return the current batch. + def batch(self, topic, message, create=True, autocommit=True): + """Return the current batch for the provided topic. This will create a new batch only if no batch currently exists. Args: topic (str): A string representing the topic. + message (~.pubsub_v1.types.PubsubMessage): The message that will + be committed. create (bool): Whether to create a new batch if no batch is found. Defaults to True. autocommit (bool): Whether to autocommit this batch. This is primarily useful for debugging. Returns: - :class:~`pubsub_v1.batch.Batch` The batch object. + ~.pubsub_v1.batch.Batch: The batch object. """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. 
- accepting = base.BaseBatch.Status.ACCEPTING_MESSAGES - if self._batches.get(topic, base.FAKE).status != accepting: + if not self._batches.get(topic, self._rejection).will_accept(message): if not create: return None self._batches[topic] = self._batch_class( @@ -141,7 +136,7 @@ def publish(self, topic, data, **attrs): sent as metadata. (These may be text strings or byte strings.) Returns: - :class:`~.pubsub_v1.publisher.futures.Future`: An object conforming + ~.pubsub_v1.publisher.futures.Future: An object conforming to the ``concurrent.futures.Future`` interface. """ # Sanity check: Is the data being sent as a bytestring? @@ -152,13 +147,16 @@ def publish(self, topic, data, **attrs): # Coerce all attributes to text strings. for k, v in copy.copy(attrs).items(): - if isinstance(data, six.text_type): + if isinstance(v, six.text_type): continue - if isinstance(data, six.binary_type): + if isinstance(v, six.binary_type): attrs[k] = v.decode('utf-8') continue raise TypeError('All attributes being published to Pub/Sub must ' 'be sent as text strings.') + # Create the Pub/Sub message object. + message = types.PubsubMessage(data=data, attributes=attrs) + # Delegate the publishing to the batch. - return self.batch(topic).publish(data, *attrs) + return self.batch(topic, message=message).publish(message) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index e37993b24035..bedc5d5a2a48 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -14,3 +14,7 @@ class PublishError(RuntimeError): pass + + +class TimeoutError(RuntimeError): + pass From 9c701e320660ab96ac7e1e3b9ada6310df639ef0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 25 Jul 2017 09:02:43 -0700 Subject: [PATCH 44/86] Publisher tests complete. 
--- pubsub/.coveragerc | 6 ++ .../pubsub_v1/publisher/batch/test_base.py | 63 +++++++++++++++++++ .../publisher/batch/test_thread_future.py | 41 ++++++++++++ .../unit/pubsub_v1/publisher/test_client.py | 15 +++++ 4 files changed, 125 insertions(+) create mode 100644 pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py diff --git a/pubsub/.coveragerc b/pubsub/.coveragerc index a54b99aa14b7..588fc38a3c2d 100644 --- a/pubsub/.coveragerc +++ b/pubsub/.coveragerc @@ -1,5 +1,9 @@ [run] branch = True +source = + google.cloud.pubsub + google.cloud.pubsub_v1 + tests.unit [report] fail_under = 100 @@ -9,3 +13,5 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py new file mode 100644 index 000000000000..17203e129922 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -0,0 +1,63 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher.batch.thread import Batch + + +def create_batch(status=None, settings=types.BatchSettings()): + """Create a batch object, which does not commit. 
+ + Args: + status (str): If provided, the batch's internal status will be set + to the provided status. + + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: The batch object + """ + client = publisher.Client() + batch = Batch(client, 'topic_name', settings, autocommit=False) + if status: + batch._status = status + return batch + + +def test_len(): + batch = create_batch(status=Batch.Status.ACCEPTING_MESSAGES) + assert len(batch) == 0 + batch.publish(types.PubsubMessage(data=b'foo')) + assert len(batch) == 1 + +def test_will_accept(): + batch = create_batch(status=Batch.Status.ACCEPTING_MESSAGES) + message = types.PubsubMessage() + assert batch.will_accept(message) is True + + +def test_will_not_accept_status(): + batch = create_batch(status='talk to the hand') + message = types.PubsubMessage() + assert batch.will_accept(message) is False + + +def test_will_not_accept_size(): + batch = create_batch( + settings=types.BatchSettings(max_bytes=10), + status=Batch.Status.ACCEPTING_MESSAGES, + ) + message = types.PubsubMessage(data=b'abcdefghijklmnopqrstuvwxyz') + assert batch.will_accept(message) is False diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py index 2b2cb26e1303..89661947ddee 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py @@ -110,3 +110,44 @@ def test_exception_timeout(): # only three seconds were left before the timeout was to be hit). 
assert sleep.call_count == 4 assert sleep.mock_calls[0] + + +def test_result_no_error(): + batch = create_batch(status='success') + future = create_future(batch=batch) + batch.message_ids[hash(future)] = '42' + assert future.result() == '42' + + +def test_result_with_error(): + batch = create_batch(status='error') + batch.error = RuntimeError('Something really bad happened.') + future = create_future(batch=batch) + with pytest.raises(RuntimeError): + future.result() + + +def test_add_done_callback_pending_batch(): + future = create_future() + callback = mock.Mock() + future.add_done_callback(callback) + assert len(future._callbacks) == 1 + assert callback in future._callbacks + assert callback.call_count == 0 + + +def test_add_done_callback_completed_batch(): + batch = create_batch(status='success') + future = create_future(batch=batch) + callback = mock.Mock() + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +def test_trigger(): + future = create_future() + callback = mock.Mock() + future.add_done_callback(callback) + assert callback.call_count == 0 + future._trigger() + callback.assert_called_once_with(future) diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_client.py index 6d5f653a46f4..6ee66d636578 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/test_client.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_client.py @@ -121,3 +121,18 @@ def test_publish_attrs_type_error(): client = publisher.Client() with pytest.raises(TypeError): client.publish('topic_name', b'foo', answer=42) + + +def test_gapic_instance_method(): + client = publisher.Client() + with mock.patch.object(client.api, '_create_topic', autospec=True) as ct: + client.create_topic('projects/foo/topics/bar') + assert ct.call_count == 1 + _, args, _ = ct.mock_calls[0] + assert args[0] == types.Topic(name='projects/foo/topics/bar') + + +def test_gapic_class_method(): + client = publisher.Client() + 
answer = client.topic_path('foo', 'bar') + assert answer == 'projects/foo/topics/bar' From de38b839d8d7d2ba057b7d218a9cc1ddd1af8d44 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Jul 2017 07:19:14 -0700 Subject: [PATCH 45/86] subscriber/client.py tests --- pubsub/tests/unit/pubsub_v1/__init__.py | 0 .../pubsub_v1/publisher/batch/test_base.py | 1 + .../publisher/batch/test_thread_future.py | 4 +- ...est_client.py => test_publisher_client.py} | 0 .../pubsub_v1/subscriber/test_consumer.py | 13 +++++++ .../subscriber/test_subscriber_client.py | 38 +++++++++++++++++++ 6 files changed, 54 insertions(+), 2 deletions(-) delete mode 100644 pubsub/tests/unit/pubsub_v1/__init__.py rename pubsub/tests/unit/pubsub_v1/publisher/{test_client.py => test_publisher_client.py} (100%) create mode 100644 pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py create mode 100644 pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py diff --git a/pubsub/tests/unit/pubsub_v1/__init__.py b/pubsub/tests/unit/pubsub_v1/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index 17203e129922..5210d2e62b58 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -42,6 +42,7 @@ def test_len(): batch.publish(types.PubsubMessage(data=b'foo')) assert len(batch) == 1 + def test_will_accept(): batch = create_batch(status=Batch.Status.ACCEPTING_MESSAGES) message = types.PubsubMessage() diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py index 89661947ddee..ee4014ee3691 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py @@ -139,14 +139,14 @@ def 
test_add_done_callback_pending_batch(): def test_add_done_callback_completed_batch(): batch = create_batch(status='success') future = create_future(batch=batch) - callback = mock.Mock() + callback = mock.Mock(spec=()) future.add_done_callback(callback) callback.assert_called_once_with(future) def test_trigger(): future = create_future() - callback = mock.Mock() + callback = mock.Mock(spec=()) future.add_done_callback(callback) assert callback.call_count == 0 future._trigger() diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py similarity index 100% rename from pubsub/tests/unit/pubsub_v1/publisher/test_client.py rename to pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py new file mode 100644 index 000000000000..4fc99a9082dc --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -0,0 +1,13 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py new file mode 100644 index 000000000000..77f8b016abb6 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -0,0 +1,38 @@ +# Copyright 2017, Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def test_init(): + client = subscriber.Client() + assert client._policy_class is thread.Policy + + +def test_subscribe(): + client = subscriber.Client() + subscription = client.subscribe('sub_name') + assert isinstance(subscription, thread.Policy) + + +def test_subscribe_with_callback(): + client = subscriber.Client() + callback = mock.Mock() + with mock.patch.object(thread.Policy, 'open') as open_: + subscription = client.subscribe('sub_name', callback) + open_.assert_called_once_with(callback) + assert isinstance(subscription, thread.Policy) From faeaa8edcc56a011b9398992e9943597c2c35243 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Jul 2017 08:53:34 -0700 Subject: [PATCH 46/86] Consumer tests --- .../cloud/pubsub_v1/subscriber/exceptions.py | 19 ---- .../pubsub_v1/subscriber/test_consumer.py | 103 ++++++++++++++++++ 2 files changed, 103 insertions(+), 19 deletions(-) delete mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py b/pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py deleted file mode 100644 index 43a659974c23..000000000000 --- a/pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py +++ /dev/null @@ -1,19 +0,0 @@ -# Copyright 2017, Google Inc. 
All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - - -class AlreadyOpen(RuntimeError): - pass diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 4fc99a9082dc..24d60a627989 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -11,3 +11,106 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ +import queue + +import mock + +import pytest + +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import consumer +from google.cloud.pubsub_v1.subscriber import helper_threads +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_consumer(): + client = subscriber.Client() + subscription = client.subscribe('sub_name') + return consumer.Consumer(policy=subscription) + + +def test_send_request(): + consumer = create_consumer() + request = types.StreamingPullRequest(subscription='foo') + with mock.patch.object(queue.Queue, 'put') as put: + consumer.send_request(request) + put.assert_called_once_with(request) + + +def test_request_generator_thread(): + consumer = create_consumer() + generator = consumer._request_generator_thread() + + # The first request that comes from the request generator thread + # should always be the initial request. + initial_request = next(generator) + assert initial_request.subscription == 'sub_name' + assert initial_request.stream_ack_deadline_seconds == 10 + + # Subsequent requests correspond to items placed in the request queue. + consumer.send_request(types.StreamingPullRequest(ack_ids=['i'])) + request = next(generator) + assert request.ack_ids == ['i'] + + # The poison pill should stop the loop. + consumer.send_request(helper_threads.STOP) + with pytest.raises(StopIteration): + next(generator) + + +def test_blocking_consume(): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we run out of them. 
+ with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: + consumer._blocking_consume() + assert on_res.call_count == 2 + assert on_res.mock_calls[0][1][1] == mock.sentinel.A + assert on_res.mock_calls[1][1][1] == mock.sentinel.B + + +def test_blocking_consume_keyboard_interrupt(): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we are sent the exiting event. + with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: + on_res.side_effect = KeyboardInterrupt + consumer._blocking_consume() + on_res.assert_called_once_with(consumer._policy, mock.sentinel.A) + + +@mock.patch.object(thread.Policy, 'call_rpc', autospec=True) +@mock.patch.object(thread.Policy, 'on_response', autospec=True) +@mock.patch.object(thread.Policy, 'on_exception', autospec=True) +def test_blocking_consume_exception_reraise(on_exc, on_res, call_rpc): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we are sent the exiting event. 
+ call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + on_res.side_effect = TypeError('Bad things!') + on_exc.side_effect = on_res.side_effect + with pytest.raises(TypeError): + consumer._blocking_consume() + + +def test_start_consuming(): + consumer = create_consumer() + helper_threads = consumer.helper_threads + with mock.patch.object(helper_threads, 'start', autospec=True) as start: + consumer.start_consuming() + assert consumer._exiting.is_set() is False + assert consumer.active is True + start.assert_called_once_with( + 'consume bidirectional stream', + consumer._request_queue, + consumer._blocking_consume, + ) From d467719e732adbc3f82caa4df4855f402abc9929 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Jul 2017 08:54:21 -0700 Subject: [PATCH 47/86] Fix minor linting error. --- pubsub/nox.py | 12 ++++++------ .../tests/unit/pubsub_v1/subscriber/test_consumer.py | 1 - 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/pubsub/nox.py b/pubsub/nox.py index 209ed41f9bfc..0950c73db088 100644 --- a/pubsub/nox.py +++ b/pubsub/nox.py @@ -35,10 +35,9 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. 
- session.run('py.test', '--quiet', - '--cov=google.cloud.pubsub', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run('py.test', '--quiet', '--cov-append', '--cov-report=', + '--cov=google.cloud.pubsub', '--cov=google.cloud.pubsub_v1', + '--cov=tests.unit', '--cov-config=.coveragerc', 'tests/unit' ) @@ -95,5 +94,6 @@ def cover(session): """ session.interpreter = 'python3.6' session.install('coverage', 'pytest-cov') - session.run('coverage', 'report', '--show-missing', '--fail-under=100') - session.run('coverage', 'erase') + session.run('coverage', 'html', '--fail-under=0') + # session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase', success_codes=(0, 1)) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 24d60a627989..3ab7b21e86c1 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -92,7 +92,6 @@ def test_blocking_consume_keyboard_interrupt(): @mock.patch.object(thread.Policy, 'on_exception', autospec=True) def test_blocking_consume_exception_reraise(on_exc, on_res, call_rpc): consumer = create_consumer() - Policy = type(consumer._policy) # Establish that we get responses until we are sent the exiting event. 
call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) From c821d33944908d73f07d73e26cf6ab615d7a4af0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Jul 2017 09:14:40 -0700 Subject: [PATCH 48/86] Histogram tests --- .../cloud/pubsub_v1/subscriber/histogram.py | 10 +-- .../pubsub_v1/subscriber/test_histogram.py | 84 +++++++++++++++++++ 2 files changed, 89 insertions(+), 5 deletions(-) create mode 100644 pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py index 3f8b64ed9f73..0e3d74d68b25 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py @@ -91,7 +91,7 @@ def max(self): """ if len(self._data) == 0: return 600 - return next(iter(reversed(list(self._data.keys())))) + return next(iter(reversed(sorted(self._data.keys())))) @property def min(self): @@ -104,7 +104,7 @@ def min(self): """ if len(self._data) == 0: return 10 - return next(iter(self._data.keys())) + return next(iter(sorted(self._data.keys()))) def add(self, value): """Add the value to this histogram. @@ -143,11 +143,11 @@ def percentile(self, percent): target = len(self) - len(self) * (percent / 100) # Iterate over the values in reverse, dropping the target by the - # number of times each value has been seen. When the target reaches + # number of times each value has been seen. When the target passes # 0, return the value we are currently viewing. - for k in reversed(list(self._data.keys())): + for k in reversed(sorted(self._data.keys())): target -= self._data[k] - if target <= 0: + if target < 0: return k # The only way to get here is if there was no data. 
diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py new file mode 100644 index 000000000000..d3e5e02a92c0 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py @@ -0,0 +1,84 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.pubsub_v1.subscriber import histogram + + +def test_init(): + data = {} + histo = histogram.Histogram(data=data) + assert histo._data is data + assert len(histo) == 0 + + +def test_contains(): + histo = histogram.Histogram() + histo.add(10) + histo.add(20) + assert 10 in histo + assert 20 in histo + assert 30 not in histo + + +def test_max(): + histo = histogram.Histogram() + assert histo.max == 600 + histo.add(120) + assert histo.max == 120 + histo.add(150) + assert histo.max == 150 + histo.add(20) + assert histo.max == 150 + + +def test_min(): + histo = histogram.Histogram() + assert histo.min == 10 + histo.add(60) + assert histo.min == 60 + histo.add(30) + assert histo.min == 30 + histo.add(120) + assert histo.min == 30 + + +def test_add(): + histo = histogram.Histogram() + histo.add(60) + assert histo._data[60] == 1 + histo.add(60) + assert histo._data[60] == 2 + + +def test_add_lower_limit(): + histo = histogram.Histogram() + histo.add(5) + assert 5 not in histo + assert 10 in histo + + +def test_add_upper_limit(): + histo = histogram.Histogram() + histo.add(12000) + assert 
12000 not in histo + assert 600 in histo + + +def test_percentile(): + histo = histogram.Histogram() + [histo.add(i) for i in range(101, 201)] + assert histo.percentile(100) == 200 + assert histo.percentile(101) == 200 + assert histo.percentile(99) == 199 + assert histo.percentile(1) == 101 From ed750b2d69359b38c4e82105e5dace41a13154a7 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Jul 2017 09:50:34 -0700 Subject: [PATCH 49/86] Minor fix based on Max feedback. --- pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 77f24f57f92d..d03c85cb8f55 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -134,9 +134,7 @@ def call_rpc(self, request_generator): and blocks if there are no outstanding requests (until such time as there are). """ - return self._client.api.streaming_pull(request_generator, - options=gax.CallOptions(timeout=600), - ) + return self._client.api.streaming_pull(request_generator) def drop(self, ack_id): """Remove the given ack ID from lease management. From 216310c7922c198b61741788981889e6cc6cfeae Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Jul 2017 10:21:32 -0700 Subject: [PATCH 50/86] starting on helper thread tests --- .../subscriber/test_helper_threads.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py new file mode 100644 index 000000000000..216c3c8dce89 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -0,0 +1,29 @@ +# Copyright 2017, Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import queue +import threading + +import mock + +from google.cloud.pubsub_v1.subscriber import helper_threads + + +def test_start(): + registry = helper_threads.HelperThreadRegistry() + queue_ = queue.Queue() + target = mock.Mock(spec=()) + with mock.patch.object(threading.Thread, 'start', autospec=True) as start: + registry.start('foo', queue_, target) + assert start.called From a1fd28782749bab61336d8582ece94461e081589 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Jul 2017 08:38:10 -0700 Subject: [PATCH 51/86] Add tests for helper_threads. --- .../pubsub_v1/subscriber/helper_threads.py | 2 +- .../subscriber/test_helper_threads.py | 82 +++++++++++++++++++ 2 files changed, 83 insertions(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py b/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py index 07ada2a0def3..6fc775cae634 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py @@ -80,7 +80,7 @@ def stop(self, name): """ # Attempt to retrieve the thread; if it is gone already, no-op. helper_thread = self._helper_threads.get(name) - if helper_thread.thread is None: + if helper_thread is None: return # Join the thread if it is still alive. 
diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 216c3c8dce89..a5f2c868f994 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -27,3 +27,85 @@ def test_start(): with mock.patch.object(threading.Thread, 'start', autospec=True) as start: registry.start('foo', queue_, target) assert start.called + + +def test_stop_noop(): + registry = helper_threads.HelperThreadRegistry() + assert len(registry._helper_threads) == 0 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + +def test_stop_dead_thread(): + registry = helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = helper_threads._HelperThread( + name='foo', + queue=None, + thread=threading.Thread(target=lambda: None), + ) + assert len(registry._helper_threads) == 1 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + +@mock.patch.object(queue.Queue, 'put') +@mock.patch.object(threading.Thread, 'is_alive') +@mock.patch.object(threading.Thread, 'join') +def test_stop_alive_thread(join, is_alive, put): + is_alive.return_value = True + + # Set up a registry with a helper thread in it. + registry = helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = helper_threads._HelperThread( + name='foo', + queue=queue.Queue(), + thread=threading.Thread(target=lambda: None), + ) + + # Assert that the helper thread is present, and removed correctly + # on stop. + assert len(registry._helper_threads) == 1 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + # Assert that all of our mocks were called in the expected manner. 
+ is_alive.assert_called_once_with() + join.assert_called_once_with() + put.assert_called_once_with(helper_threads.STOP) + + +def test_stop_all(): + registry = helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = helper_threads._HelperThread( + name='foo', + queue=None, + thread=threading.Thread(target=lambda: None), + ) + assert len(registry._helper_threads) == 1 + registry.stop_all() + assert len(registry._helper_threads) == 0 + + +def test_stop_all_noop(): + registry = helper_threads.HelperThreadRegistry() + assert len(registry._helper_threads) == 0 + registry.stop_all() + assert len(registry._helper_threads) == 0 + + +def test_queue_callback_thread(): + queue_ = queue.Queue() + callback = mock.Mock(spec=()) + qct = helper_threads.QueueCallbackThread(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = (mock.sentinel.A, helper_threads.STOP) + qct() + + # Assert that we got the expected calls. + assert get.call_count == 2 + assert get.mock_calls[0][1][0] == mock.sentinel.A + assert get.mock_calls[1][1][0] == helper_threads.STOP + callback.assert_called_once_with(mock.sentinel.A) From 32701e16d42649f24e88957e58fe6c3ff63e3e8e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Jul 2017 10:38:52 -0700 Subject: [PATCH 52/86] Almost done with unit tests. 
--- .../pubsub_v1/subscriber/helper_threads.py | 3 + pubsub/google/cloud/pubsub_v1/types.py | 8 +- pubsub/nox.py | 2 +- .../subscriber/test_helper_threads.py | 2 - .../unit/pubsub_v1/subscriber/test_message.py | 92 +++++++++++++++ .../subscriber/test_policy_thread.py | 108 ++++++++++++++++++ pubsub/tests/unit/test_pubsub.py | 22 ++++ 7 files changed, 233 insertions(+), 4 deletions(-) create mode 100644 pubsub/tests/unit/pubsub_v1/subscriber/test_message.py create mode 100644 pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py create mode 100644 pubsub/tests/unit/test_pubsub.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py b/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py index 6fc775cae634..42bfab4b4a51 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/helper_threads.py @@ -43,6 +43,9 @@ class HelperThreadRegistry(object): def __init__(self): self._helper_threads = {} + def __contains__(self, needle): + return needle in self._helper_threads + def start(self, name, queue, target, *args, **kwargs): """Create and start a helper thread. diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py index f7ab43d1ea4e..f770da096a39 100644 --- a/pubsub/google/cloud/pubsub_v1/types.py +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -21,6 +21,7 @@ from google.cloud.proto.pubsub.v1 import pubsub_pb2 from google.gax.utils.messages import get_messages +from google.protobuf import timestamp_pb2 # Define the default values for batching. @@ -51,7 +52,12 @@ ) -names = ['BatchSettings', 'FlowControl'] +# Pub/Sub uses timestamps from the common protobuf package. +# Do not make users import from there. 
+Timestamp = timestamp_pb2.Timestamp + + +names = ['BatchSettings', 'FlowControl', 'Timestamp'] for name, message in get_messages(pubsub_pb2).items(): setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/pubsub/nox.py b/pubsub/nox.py index 0950c73db088..cc06d5bf1b2a 100644 --- a/pubsub/nox.py +++ b/pubsub/nox.py @@ -37,7 +37,7 @@ def unit_tests(session, python_version): # Run py.test against the unit tests. session.run('py.test', '--quiet', '--cov-append', '--cov-report=', '--cov=google.cloud.pubsub', '--cov=google.cloud.pubsub_v1', - '--cov=tests.unit', '--cov-config=.coveragerc', 'tests/unit' + '--cov-config=.coveragerc', 'tests/unit' ) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index a5f2c868f994..f1234eefc4b2 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -106,6 +106,4 @@ def test_queue_callback_thread(): # Assert that we got the expected calls. assert get.call_count == 2 - assert get.mock_calls[0][1][0] == mock.sentinel.A - assert get.mock_calls[1][1][0] == helper_threads.STOP callback.assert_called_once_with(mock.sentinel.A) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py new file mode 100644 index 000000000000..dc9d4de055de --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -0,0 +1,92 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +import mock + +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_message(data, ack_id='ACKID', **attrs): + client = subscriber.Client() + policy = thread.Policy(client, 'sub_name') + with mock.patch.object(message.Message, 'lease') as lease: + with mock.patch.object(time, 'time') as time_: + time_.return_value = 1335020400 + msg = message.Message(policy, ack_id, types.PubsubMessage( + attributes=attrs, + data=data, + message_id='message_id', + publish_time=types.Timestamp(seconds=1335020400 - 86400), + )) + lease.assert_called_once_with() + return msg + + +def test_attributes(): + msg = create_message(b'foo', baz='bacon', spam='eggs') + assert msg.attributes == {'baz': 'bacon', 'spam': 'eggs'} + + +def test_data(): + msg = create_message(b'foo') + assert msg.data == b'foo' + + +def test_publish_time(): + msg = create_message(b'foo') + assert msg.publish_time == types.Timestamp(seconds=1335020400 - 86400) + + +def test_ack(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(thread.Policy, 'ack') as ack: + with mock.patch.object(message.Message, 'drop') as drop: + msg.ack() + ack.assert_called_once_with('bogus_ack_id') + drop.assert_called_once_with() + + +def test_drop(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(thread.Policy, 'drop') as drop: + msg.drop() + drop.assert_called_once_with('bogus_ack_id') + + 
+def test_lease(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(thread.Policy, 'lease') as lease: + msg.lease() + lease.assert_called_once_with('bogus_ack_id') + + +def test_modify_ack_deadline(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(thread.Policy, 'modify_ack_deadline') as mad: + msg.modify_ack_deadline(60) + mad.assert_called_once_with('bogus_ack_id', 60) + + +def test_nack(): + msg = create_message(b'foo') + with mock.patch.object(message.Message, 'modify_ack_deadline') as mad: + with mock.patch.object(message.Message, 'drop') as drop: + msg.nack() + mad.assert_called_once_with(seconds=0) + drop.assert_called_once_with() diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py new file mode 100644 index 000000000000..11a57893002f --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -0,0 +1,108 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading + +import grpc + +import mock + +import pytest + +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import helper_threads +from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(): + client = subscriber.Client() + return thread.Policy(client, 'sub_name') + + +def test_init(): + policy = create_policy() + policy._callback(None) + + +def test_close(): + policy = create_policy() + consumer = policy._consumer + with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: + policy.close() + stop_consuming.assert_called_once_with() + assert 'callback request worker' not in policy._consumer.helper_threads + + +@mock.patch.object(helper_threads.HelperThreadRegistry, 'start') +@mock.patch.object(threading.Thread, 'start') +def test_open(thread_start, htr_start): + policy = create_policy() + with mock.patch.object(policy._consumer, 'start_consuming') as consuming: + policy.open(mock.sentinel.CALLBACK) + assert policy._callback is mock.sentinel.CALLBACK + consuming.assert_called_once_with() + htr_start.assert_called() + thread_start.assert_called() + + +def test_on_callback_request(): + policy = create_policy() + with mock.patch.object(policy, 'call_rpc') as call_rpc: + policy.on_callback_request(('call_rpc', 'something', 42)) + call_rpc.assert_called_once_with('something', 42) + + +def test_on_exception_deadline_exceeded(): + policy = create_policy() + exc = mock.Mock(spec=('code',)) + exc.code.return_value = grpc.StatusCode.DEADLINE_EXCEEDED + assert policy.on_exception(exc) is None + + +def test_on_exception_other(): + policy = create_policy() + exc = TypeError('wahhhhhh') + with pytest.raises(TypeError): + policy.on_exception(exc) + + +def test_on_response(): + callback = mock.Mock(spec=()) + + # Set up the policy. 
+ policy = create_policy() + policy._callback = callback + + # Set up the messages to send. + messages = ( + types.PubsubMessage(data=b'foo', message_id='1'), + types.PubsubMessage(data=b'bar', message_id='2'), + ) + + # Set up a valid response. + response = types.StreamingPullResponse( + received_messages=[ + {'ack_id': 'fack', 'message': messages[0]}, + {'ack_id': 'back', 'message': messages[1]}, + ], + ) + + # Actually run the method and prove that the callback was + # called in the expected way. + policy.on_response(response) + assert callback.call_count == 2 + for call in callback.mock_calls: + assert isinstance(call[1][0], message.Message) diff --git a/pubsub/tests/unit/test_pubsub.py b/pubsub/tests/unit/test_pubsub.py new file mode 100644 index 000000000000..605dbddd7601 --- /dev/null +++ b/pubsub/tests/unit/test_pubsub.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud import pubsub +from google.cloud import pubsub_v1 + + +def test_exported_things(): + assert pubsub.PublisherClient is pubsub_v1.PublisherClient + assert pubsub.SubscriberClient is pubsub_v1.SubscriberClient + assert pubsub.types is pubsub_v1.types From 34272addf41ad202032b16d2dda3a7193d4597e0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 27 Jul 2017 14:06:29 -0700 Subject: [PATCH 53/86] Full coverage. 
--- .../cloud/pubsub_v1/subscriber/policy/base.py | 10 +- .../pubsub_v1/subscriber/policy/thread.py | 1 - pubsub/nox.py | 5 +- .../pubsub_v1/subscriber/test_policy_base.py | 139 ++++++++++++++++++ 4 files changed, 147 insertions(+), 8 deletions(-) create mode 100644 pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index d03c85cb8f55..b52d03ba6078 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -21,8 +21,6 @@ import six -from google import gax - from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import consumer from google.cloud.pubsub_v1.subscriber import histogram @@ -81,7 +79,11 @@ def ack_deadline(self): Returns: int: The correct ack deadline. """ - if len(self.histogram) > self._last_histogram_size * 2: + target = min([ + self._last_histogram_size * 2, + self._last_histogram_size + 100, + ]) + if len(self.histogram) > target: self._ack_deadline = self.histogram.percentile(percent=99) return self._ack_deadline @@ -220,7 +222,7 @@ def nack(self, ack_id): Args: ack_id (str): The ack ID. """ - return self.modify_ack_deadline(ack_id, 0) + return self.modify_ack_deadline(ack_id=ack_id, seconds=0) @abc.abstractmethod def on_response(self, response): diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index acb318006625..0b0603ee7eac 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -40,7 +40,6 @@ def __init__(self, client, subscription): self._callback = lambda message: None # Create a manager for keeping track of shared state. - self._managed_ack_ids = set() self._request_queue = queue.Queue() # Call the superclass constructor. 
diff --git a/pubsub/nox.py b/pubsub/nox.py index cc06d5bf1b2a..6931878c54e1 100644 --- a/pubsub/nox.py +++ b/pubsub/nox.py @@ -94,6 +94,5 @@ def cover(session): """ session.interpreter = 'python3.6' session.install('coverage', 'pytest-cov') - session.run('coverage', 'html', '--fail-under=0') - # session.run('coverage', 'report', '--show-missing', '--fail-under=100') - session.run('coverage', 'erase', success_codes=(0, 1)) + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py new file mode 100644 index 000000000000..3c133be5d809 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -0,0 +1,139 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time + +import mock + +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(): + client = subscriber.Client() + return thread.Policy(client, 'sub_name') + + +def test_ack_deadline(): + policy = create_policy() + assert policy.ack_deadline == 10 + policy.histogram.add(20) + assert policy.ack_deadline == 20 + policy.histogram.add(10) + assert policy.ack_deadline == 20 + + +def test_initial_request(): + policy = create_policy() + initial_request = policy.initial_request + assert isinstance(initial_request, types.StreamingPullRequest) + assert initial_request.subscription == 'sub_name' + assert initial_request.stream_ack_deadline_seconds == 10 + + +def test_managed_ack_ids(): + policy = create_policy() + + # Ensure we always get a set back, even if the property is not yet set. + managed_ack_ids = policy.managed_ack_ids + assert isinstance(managed_ack_ids, set) + + # Ensure that multiple calls give the same actual object back. 
+ assert managed_ack_ids is policy.managed_ack_ids + + +def test_subscription(): + policy = create_policy() + assert policy.subscription == 'sub_name' + + +def test_ack(): + policy = create_policy() + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.ack('ack_id_string') + send_request.assert_called_once_with(types.StreamingPullRequest( + ack_ids=['ack_id_string'], + )) + + +def test_call_rpc(): + policy = create_policy() + with mock.patch.object(policy._client.api, 'streaming_pull') as pull: + policy.call_rpc(mock.sentinel.GENERATOR) + pull.assert_called_once_with(mock.sentinel.GENERATOR) + + +def test_drop(): + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy.drop('ack_id_string') + assert len(policy.managed_ack_ids) == 0 + + +def test_modify_ack_deadline(): + policy = create_policy() + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.modify_ack_deadline('ack_id_string', 60) + send_request.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['ack_id_string'], + modify_deadline_seconds=[60], + )) + + +def test_maintain_leases_inactive_consumer(): + policy = create_policy() + policy._consumer.active = False + assert policy.maintain_leases() is None + + +def test_maintain_leases_ack_ids(): + policy = create_policy() + policy._consumer.active = True + policy.lease('my ack id') + + # Mock the sleep object. + with mock.patch.object(time, 'sleep', autospec=True) as sleep: + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer.active = False + sleep.side_effect = trigger_inactive + + # Also mock the consumer, which sends the request. 
+ with mock.patch.object(policy._consumer, 'send_request') as send: + policy.maintain_leases() + send.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['my ack id'], + modify_deadline_seconds=[10], + )) + sleep.assert_called() + + +def test_maintain_leases_no_ack_ids(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(time, 'sleep', autospec=True) as sleep: + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer.active = False + sleep.side_effect = trigger_inactive + policy.maintain_leases() + sleep.assert_called() + + +def test_nack(): + policy = create_policy() + with mock.patch.object(policy, 'modify_ack_deadline') as mad: + policy.nack('ack_id_string') + mad.assert_called_once_with(ack_id='ack_id_string', seconds=0) From e1c7c84c01f23aa88a777fa5b11f91ac50d5dece Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 31 Jul 2017 12:08:40 -0700 Subject: [PATCH 54/86] Do not send policy across the concurrency boundary. --- .../cloud/pubsub_v1/subscriber/message.py | 23 ++++++++-------- .../cloud/pubsub_v1/subscriber/policy/base.py | 7 ++++- .../pubsub_v1/subscriber/policy/thread.py | 4 +-- .../unit/pubsub_v1/subscriber/test_message.py | 27 +++++++++---------- .../pubsub_v1/subscriber/test_policy_base.py | 12 +++++++++ 5 files changed, 43 insertions(+), 30 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index e2bf9415f9a8..83797a248fc0 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -42,7 +42,7 @@ class Message(object): publish_time (datetime): The time that this message was originally published. """ - def __init__(self, policy, ack_id, message): + def __init__(self, message, ack_id, request_queue): """Construct the Message. .. 
note:: @@ -51,16 +51,16 @@ def __init__(self, policy, ack_id, message): responsibility of :class:`BasePolicy` subclasses to do so. Args: - policy (~.pubsub_v1.subscriber.policy.BasePolicy): The policy - that created this message, and understands how to handle - actions from that message (e.g. acks). - ack_id (str): The ack_id received from Pub/Sub. message (~.pubsub_v1.types.PubsubMessage): The message received from Pub/Sub. + ack_id (str): The ack_id received from Pub/Sub. + request_queue (queue.Queue): A queue provided by the policy that + can accept requests; the policy is responsible for handling + those requests. """ - self._policy = policy - self._ack_id = ack_id self._message = message + self._ack_id = ack_id + self._request_queue = request_queue self.message_id = message.message_id # The instantiation time is the time that this message @@ -127,8 +127,7 @@ def ack(self): receive any given message more than once. """ time_to_ack = math.ceil(time.time() - self._received_timestamp) - self._policy.histogram.add(time_to_ack) - self._policy.ack(self._ack_id) + self._request_queue.put(('ack', self._ack_id, time_to_ack)) self.drop() def drop(self): @@ -144,7 +143,7 @@ def drop(self): both call this one. You probably do not want to call this method directly. """ - self._policy.drop(self._ack_id) + self._request_queue.put(('drop', self._ack_id)) def lease(self): """Inform the policy to lease this message continually. @@ -153,7 +152,7 @@ def lease(self): This method is called by the constructor, and you should never need to call it manually. """ - self._policy.lease(self._ack_id) + self._request_queue.put(('lease', self._ack_id)) def modify_ack_deadline(self, seconds): """Set the deadline for acknowledgement to the given value. @@ -173,7 +172,7 @@ def modify_ack_deadline(self, seconds): to. This should be between 0 and 600. Due to network latency, values below 10 are advised against. 
""" - self._policy.modify_ack_deadline(self._ack_id, seconds) + self._request_queue.put(('modify_ack_deadline', self._ack_id, seconds)) def nack(self): """Decline to acknowldge the given message. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index b52d03ba6078..98f4789c181e 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -119,12 +119,17 @@ def subscription(self): """ return self._subscription - def ack(self, ack_id): + def ack(self, ack_id, time_to_ack=None): """Acknowledge the message corresponding to the given ack_id. Args: ack_id (str): The ack ID. + time_to_ack (int): The time it took to ack the message, measured + from when it was received from the subscription. This is used + to improve the automatic ack timing. """ + if time_to_ack is not None: + self.histogram.add(int(time_to_ack)) request = types.StreamingPullRequest(ack_ids=[ack_id]) self._consumer.send_request(request) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 0b0603ee7eac..e0371740c9db 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -39,7 +39,7 @@ def __init__(self, client, subscription): # Default the callback to a no-op; it is provided by `.open`. self._callback = lambda message: None - # Create a manager for keeping track of shared state. + # Create a queue for keeping track of shared state. self._request_queue = queue.Queue() # Call the superclass constructor. 
@@ -113,5 +113,5 @@ def on_response(self, response): """ for msg in response.received_messages: logger.debug('New message received from Pub/Sub: %r', msg) - message = Message(self, msg.ack_id, msg.message) + message = Message(msg.message, msg.ack_id, self._request_queue) self._executor.submit(self._callback, message) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index dc9d4de055de..ca132a567ef5 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -12,28 +12,25 @@ # See the License for the specific language governing permissions and # limitations under the License. +import queue import time import mock -from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message -from google.cloud.pubsub_v1.subscriber.policy import thread def create_message(data, ack_id='ACKID', **attrs): - client = subscriber.Client() - policy = thread.Policy(client, 'sub_name') with mock.patch.object(message.Message, 'lease') as lease: with mock.patch.object(time, 'time') as time_: time_.return_value = 1335020400 - msg = message.Message(policy, ack_id, types.PubsubMessage( + msg = message.Message(types.PubsubMessage( attributes=attrs, data=data, message_id='message_id', publish_time=types.Timestamp(seconds=1335020400 - 86400), - )) + ), ack_id, queue.Queue()) lease.assert_called_once_with() return msg @@ -55,32 +52,32 @@ def test_publish_time(): def test_ack(): msg = create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(thread.Policy, 'ack') as ack: + with mock.patch.object(msg._request_queue, 'put') as put: with mock.patch.object(message.Message, 'drop') as drop: msg.ack() - ack.assert_called_once_with('bogus_ack_id') + put.assert_called_once_with(('ack', 'bogus_ack_id', mock.ANY)) drop.assert_called_once_with() def test_drop(): msg = 
create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(thread.Policy, 'drop') as drop: + with mock.patch.object(msg._request_queue, 'put') as put: msg.drop() - drop.assert_called_once_with('bogus_ack_id') + put.assert_called_once_with(('drop', 'bogus_ack_id')) def test_lease(): msg = create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(thread.Policy, 'lease') as lease: + with mock.patch.object(msg._request_queue, 'put') as put: msg.lease() - lease.assert_called_once_with('bogus_ack_id') + put.assert_called_once_with(('lease', 'bogus_ack_id')) def test_modify_ack_deadline(): - msg = create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(thread.Policy, 'modify_ack_deadline') as mad: + msg = create_message(b'foo', ack_id='bogus_id') + with mock.patch.object(msg._request_queue, 'put') as put: msg.modify_ack_deadline(60) - mad.assert_called_once_with('bogus_ack_id', 60) + put.assert_called_once_with(('modify_ack_deadline', 'bogus_id', 60)) def test_nack(): diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 3c133be5d809..563159fa4bed 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -60,12 +60,24 @@ def test_subscription(): def test_ack(): + policy = create_policy() + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.ack('ack_id_string', 20) + send_request.assert_called_once_with(types.StreamingPullRequest( + ack_ids=['ack_id_string'], + )) + assert len(policy.histogram) == 1 + assert 20 in policy.histogram + + +def test_ack_no_time(): policy = create_policy() with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string') send_request.assert_called_once_with(types.StreamingPullRequest( ack_ids=['ack_id_string'], )) + assert len(policy.histogram) == 0 def test_call_rpc(): From 
2b21f48edbce3db6260854dfc6b9434dc9c952e4 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 31 Jul 2017 13:53:43 -0700 Subject: [PATCH 55/86] Shift flow control to the policy class. --- .../cloud/pubsub_v1/subscriber/client.py | 12 +++++------ .../cloud/pubsub_v1/subscriber/policy/base.py | 6 +++++- .../pubsub_v1/subscriber/policy/thread.py | 20 +++++++++++++++++-- 3 files changed, 28 insertions(+), 10 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index 58fa66881ba7..afb9f7d7ca75 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -19,6 +19,7 @@ from google.cloud.gapic.pubsub.v1 import subscriber_client from google.cloud.pubsub_v1 import _gapic +from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber.policy import thread @@ -35,9 +36,6 @@ class Client(object): get sensible defaults. Args: - flow_control (~.pubsub_v1.types.FlowControl): The flow control - settings. Use this to prevent situations where you are - inundated with too many messages at once. policy_class (class): A class that describes how to handle subscriptions. You may subclass the :class:`.pubsub_v1.subscriber.policy.base.BasePolicy` @@ -50,8 +48,7 @@ class in order to define your own consumer. This is primarily Generally, you should not need to set additional keyword arguments. """ - def __init__(self, flow_control=(), policy_class=thread.Policy, - **kwargs): + def __init__(self, policy_class=thread.Policy, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. kwargs['lib_name'] = 'gccl' @@ -62,7 +59,7 @@ def __init__(self, flow_control=(), policy_class=thread.Policy, # messages. 
self._policy_class = policy_class - def subscribe(self, subscription, callback=None): + def subscribe(self, subscription, callback=None, flow_control=()): """Return a representation of an individual subscription. This method creates and returns a ``Consumer`` object (that is, a @@ -94,7 +91,8 @@ def subscribe(self, subscription, callback=None): ~.pubsub_v1.subscriber.consumer.base.BaseConsumer: An instance of the defined ``consumer_class`` on the client. """ - subscr = self._policy_class(self, subscription) + flow_control = types.FlowControl(*flow_control) + subscr = self._policy_class(self, subscription, flow_control) if callable(callback): subscr.open(callback) return subscr diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 98f4789c181e..68dc2941e371 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -41,7 +41,8 @@ class BasePolicy(object): subclasses may be passed as the ``policy_class`` argument to :class:`~.pubsub_v1.client.SubscriberClient`. """ - def __init__(self, client, subscription, histogram_data=None): + def __init__(self, client, subscription, + flow_control=types.FlowControl(), histogram_data=None): """Instantiate the policy. Args: @@ -50,6 +51,8 @@ def __init__(self, client, subscription, histogram_data=None): subscription (str): The name of the subscription. The canonical format for this is ``projects/{project}/subscriptions/{subscription}``. + flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. histogram_data (dict): Optional: A structure to store the histogram data for predicting appropriate ack times. If set, this should be a dictionary-like object. 
@@ -66,6 +69,7 @@ def __init__(self, client, subscription, histogram_data=None): self._consumer = consumer.Consumer(self) self._ack_deadline = 10 self._last_histogram_size = 0 + self.flow_control = flow_control self.histogram = histogram.Histogram(data=histogram_data) @property diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index e0371740c9db..ac69b3ec46dc 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -21,6 +21,7 @@ import grpc +from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import helper_threads from google.cloud.pubsub_v1.subscriber.policy import base from google.cloud.pubsub_v1.subscriber.message import Message @@ -35,7 +36,18 @@ class Policy(base.BasePolicy): This consumer handles the connection to the Pub/Sub service and all of the concurrency needs. """ - def __init__(self, client, subscription): + def __init__(self, client, subscription, flow_control=types.FlowControl()): + """Instantiate the policy. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. + """ # Default the callback to a no-op; it is provided by `.open`. self._callback = lambda message: None @@ -43,7 +55,11 @@ def __init__(self, client, subscription): self._request_queue = queue.Queue() # Call the superclass constructor. - super(Policy, self).__init__(client, subscription) + super(Policy, self).__init__( + client=client, + flow_control=flow_control, + subscription=subscription, + ) # Also maintain a request queue and an executor. 
logger.debug('Creating callback requests thread (not starting).') From 7f4b91c0fac80760ec5092b38ffd04e090f6ff4d Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 31 Jul 2017 14:33:26 -0700 Subject: [PATCH 56/86] Move the request queue to using keyword arguments. --- .../cloud/pubsub_v1/subscriber/message.py | 25 ++++++++++++++++--- .../cloud/pubsub_v1/subscriber/policy/base.py | 16 +++++++++--- .../pubsub_v1/subscriber/policy/thread.py | 4 +-- .../unit/pubsub_v1/subscriber/test_message.py | 20 ++++++++++++--- .../pubsub_v1/subscriber/test_policy_base.py | 23 +++++++++++++++-- .../subscriber/test_policy_thread.py | 4 +-- 6 files changed, 74 insertions(+), 18 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index 83797a248fc0..0fdf73643f54 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -112,6 +112,11 @@ def publish_time(self): """ return self._message.publish_time + @property + def size(self): + """Return the size of the underlying message, in bytes.""" + return self._message.ByteSize() + def ack(self): """Acknowledge the given message. @@ -127,7 +132,10 @@ def ack(self): receive any given message more than once. """ time_to_ack = math.ceil(time.time() - self._received_timestamp) - self._request_queue.put(('ack', self._ack_id, time_to_ack)) + self._request_queue.put(('ack', { + 'ack_id': self._ack_id, + 'time_to_ack': time_to_ack, + })) self.drop() def drop(self): @@ -143,7 +151,10 @@ def drop(self): both call this one. You probably do not want to call this method directly. """ - self._request_queue.put(('drop', self._ack_id)) + self._request_queue.put(('drop', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + })) def lease(self): """Inform the policy to lease this message continually. 
@@ -152,7 +163,10 @@ def lease(self): This method is called by the constructor, and you should never need to call it manually. """ - self._request_queue.put(('lease', self._ack_id)) + self._request_queue.put(('lease', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + })) def modify_ack_deadline(self, seconds): """Set the deadline for acknowledgement to the given value. @@ -172,7 +186,10 @@ def modify_ack_deadline(self, seconds): to. This should be between 0 and 600. Due to network latency, values below 10 are advised against. """ - self._request_queue.put(('modify_ack_deadline', self._ack_id, seconds)) + self._request_queue.put(('modify_ack_deadline', { + 'ack_id': self._ack_id, + 'seconds': seconds, + })) def nack(self): """Decline to acknowldge the given message. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 68dc2941e371..a2701b7b9ebb 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -69,6 +69,7 @@ def __init__(self, client, subscription, self._consumer = consumer.Consumer(self) self._ack_deadline = 10 self._last_histogram_size = 0 + self._bytes = 0 self.flow_control = flow_control self.histogram = histogram.Histogram(data=histogram_data) @@ -147,21 +148,28 @@ def call_rpc(self, request_generator): """ return self._client.api.streaming_pull(request_generator) - def drop(self, ack_id): + def drop(self, ack_id, byte_size): """Remove the given ack ID from lease management. Args: ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. """ - self.managed_ack_ids.remove(ack_id) + if ack_id in self.managed_ack_ids: + self.managed_ack_ids.remove(ack_id) + self._bytes -= byte_size + self._bytes = min([self._bytes, 0]) - def lease(self, ack_id): + def lease(self, ack_id, byte_size): """Add the given ack ID to lease management. Args: ack_id (str): The ack ID. 
+ byte_size (int): The size of the PubSub message, in bytes. """ - self.managed_ack_ids.add(ack_id) + if ack_id not in self.managed_ack_ids: + self.managed_ack_ids.add(ack_id) + self._bytes += byte_size def maintain_leases(self): """Maintain all of the leases being managed by the policy. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index ac69b3ec46dc..1f29f53c92f1 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -105,8 +105,8 @@ def open(self, callback): def on_callback_request(self, callback_request): """Map the callback request to the appropriate GRPC request.""" - action, args = callback_request[0], callback_request[1:] - getattr(self, action)(*args) + action, kwargs = callback_request[0], callback_request[1] + getattr(self, action)(**kwargs) def on_exception(self, exception): """Bubble the exception. diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index ca132a567ef5..391a6db59240 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -55,7 +55,10 @@ def test_ack(): with mock.patch.object(msg._request_queue, 'put') as put: with mock.patch.object(message.Message, 'drop') as drop: msg.ack() - put.assert_called_once_with(('ack', 'bogus_ack_id', mock.ANY)) + put.assert_called_once_with(('ack', { + 'ack_id': 'bogus_ack_id', + 'time_to_ack': mock.ANY, + })) drop.assert_called_once_with() @@ -63,21 +66,30 @@ def test_drop(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.drop() - put.assert_called_once_with(('drop', 'bogus_ack_id')) + put.assert_called_once_with(('drop', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + })) def test_lease(): msg = create_message(b'foo', 
ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.lease() - put.assert_called_once_with(('lease', 'bogus_ack_id')) + put.assert_called_once_with(('lease', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + })) def test_modify_ack_deadline(): msg = create_message(b'foo', ack_id='bogus_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.modify_ack_deadline(60) - put.assert_called_once_with(('modify_ack_deadline', 'bogus_id', 60)) + put.assert_called_once_with(('modify_ack_deadline', { + 'ack_id': 'bogus_id', + 'seconds': 60, + })) def test_nack(): diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 563159fa4bed..42f416cb50ba 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -90,8 +90,15 @@ def test_call_rpc(): def test_drop(): policy = create_policy() policy.managed_ack_ids.add('ack_id_string') - policy.drop('ack_id_string') + policy._bytes = 20 + policy.drop('ack_id_string', 20) assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 + + # Do this again to establish idempotency. + policy.drop('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 def test_modify_ack_deadline(): @@ -113,7 +120,7 @@ def test_maintain_leases_inactive_consumer(): def test_maintain_leases_ack_ids(): policy = create_policy() policy._consumer.active = True - policy.lease('my ack id') + policy.lease('my ack id', 50) # Mock the sleep object. with mock.patch.object(time, 'sleep', autospec=True) as sleep: @@ -144,6 +151,18 @@ def trigger_inactive(seconds): sleep.assert_called() +def test_lease(): + policy = create_policy() + policy.lease('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 1 + assert policy._bytes == 20 + + # Do this again to prove idempotency. 
+ policy.lease('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 1 + assert policy._bytes == 20 + + def test_nack(): policy = create_policy() with mock.patch.object(policy, 'modify_ack_deadline') as mad: diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 11a57893002f..d87848a76d9d 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -61,8 +61,8 @@ def test_open(thread_start, htr_start): def test_on_callback_request(): policy = create_policy() with mock.patch.object(policy, 'call_rpc') as call_rpc: - policy.on_callback_request(('call_rpc', 'something', 42)) - call_rpc.assert_called_once_with('something', 42) + policy.on_callback_request(('call_rpc', {'something': 42})) + call_rpc.assert_called_once_with(something=42) def test_on_exception_deadline_exceeded(): From 3852805e8b9115f491cfdc60a3eb9b3f729df8b2 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 1 Aug 2017 11:15:35 -0700 Subject: [PATCH 57/86] Can has flow control. --- .../cloud/pubsub_v1/subscriber/consumer.py | 5 +- .../cloud/pubsub_v1/subscriber/message.py | 8 +- .../cloud/pubsub_v1/subscriber/policy/base.py | 168 ++++++++++++++---- pubsub/google/cloud/pubsub_v1/types.py | 3 +- .../unit/pubsub_v1/subscriber/test_message.py | 15 +- .../pubsub_v1/subscriber/test_policy_base.py | 71 +++++++- 6 files changed, 217 insertions(+), 53 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py index a3f237d7fe63..68a9bd386201 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/consumer.py @@ -200,10 +200,11 @@ def _request_generator_thread(self): """ # First, yield the initial request. This occurs on every new # connection, fundamentally including a resumed connection. 
+ initial_request = self._policy.get_initial_request(ack_queue=True) _LOGGER.debug('Sending initial request: {initial_request}'.format( - initial_request=self._policy.initial_request, + initial_request=initial_request, )) - yield self._policy.initial_request + yield initial_request # Now yield each of the items on the request queue, and block if there # are none. This can and must block to keep the stream open. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index 0fdf73643f54..bb85823664c0 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -134,9 +134,9 @@ def ack(self): time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put(('ack', { 'ack_id': self._ack_id, + 'byte_size': self.size, 'time_to_ack': time_to_ack, })) - self.drop() def drop(self): """Release the message from lease management. @@ -196,5 +196,7 @@ def nack(self): This will cause the message to be re-delivered to the subscription. """ - self.modify_ack_deadline(seconds=0) - self.drop() + self._request_queue.put(('nack', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + })) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index a2701b7b9ebb..dd416886b5d1 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import absolute_import +from __future__ import absolute_import, division import abc import logging @@ -69,10 +69,15 @@ def __init__(self, client, subscription, self._consumer = consumer.Consumer(self) self._ack_deadline = 10 self._last_histogram_size = 0 - self._bytes = 0 self.flow_control = flow_control self.histogram = histogram.Histogram(data=histogram_data) + # These are for internal flow control tracking. + # They should not need to be used by subclasses. + self._bytes = 0 + self._ack_on_resume = set() + self._paused = False + @property def ack_deadline(self): """Return the appropriate ack deadline. @@ -92,18 +97,6 @@ def ack_deadline(self): self._ack_deadline = self.histogram.percentile(percent=99) return self._ack_deadline - @property - def initial_request(self): - """Return the initial request. - - This defines the initial request that must always be sent to Pub/Sub - immediately upon opening the subscription. - """ - return types.StreamingPullRequest( - stream_ack_deadline_seconds=self.histogram.percentile(99), - subscription=self.subscription, - ) - @property def managed_ack_ids(self): """Return the ack IDs currently being managed by the policy. @@ -124,7 +117,29 @@ def subscription(self): """ return self._subscription - def ack(self, ack_id, time_to_ack=None): + @property + def _load(self): + """Return the current load. + + The load is represented as a float, where 1.0 represents having + hit one of the flow control limits, and values between 0.0 and 1.0 + represent how close we are to them. (0.5 means we have exactly half + of what the flow control setting allows, for example.) + + There are (currently) two flow control settings; this property + computes how close the subscriber is to each of them, and returns + whichever value is higher. (It does not matter that we have lots of + running room on setting A if setting B is over.) + + Returns: + float: The load value. 
+ """ + return max([ + len(self.managed_ack_ids) / self.flow_control.max_messages, + self._bytes / self.flow_control.max_bytes, + ]) + + def ack(self, ack_id, time_to_ack=None, byte_size=None): """Acknowledge the message corresponding to the given ack_id. Args: @@ -132,11 +147,24 @@ def ack(self, ack_id, time_to_ack=None): time_to_ack (int): The time it took to ack the message, measured from when it was received from the subscription. This is used to improve the automatic ack timing. + byte_size (int): The size of the PubSub message, in bytes. """ + # If we got timing information, add it to the histogram. if time_to_ack is not None: self.histogram.add(int(time_to_ack)) - request = types.StreamingPullRequest(ack_ids=[ack_id]) - self._consumer.send_request(request) + + # Send the request to ack the message. + # However, if the consumer is inactive, then queue the ack_id here + # instead; it will be acked as part of the initial request when the + # consumer is started again. + if self._consumer.active: + request = types.StreamingPullRequest(ack_ids=[ack_id]) + self._consumer.send_request(request) + else: + self._ack_on_resume.add(ack_id) + + # Remove the message from lease management. + self.drop(ack_id=ack_id, byte_size=byte_size) def call_rpc(self, request_generator): """Invoke the Pub/Sub streaming pull RPC. @@ -155,11 +183,70 @@ def drop(self, ack_id, byte_size): ack_id (str): The ack ID. byte_size (int): The size of the PubSub message, in bytes. """ + # Remove the ack ID from lease management, and decrement the + # byte counter. if ack_id in self.managed_ack_ids: self.managed_ack_ids.remove(ack_id) self._bytes -= byte_size self._bytes = min([self._bytes, 0]) + # If we have been paused by flow control, check and see if we are + # back within our limits. + # + # In order to not thrash too much, require us to have passed below + # the resume threshold (80% by default) of each flow control setting + # before restarting. 
+ if self._paused and self._load < self.flow_control.resume_threshold: + self._paused = False + self.open(self._callback) + + def get_initial_request(self, ack_queue=False): + """Return the initial request. + + This defines the initial request that must always be sent to Pub/Sub + immediately upon opening the subscription. + + Args: + ack_queue (bool): Whether to include any acks that were sent + while the connection was paused. + + Returns: + ~.pubsub_v1.types.StreamingPullRequest: A request suitable + for being the first request on the stream (and not suitable + for any other purpose). + + .. note:: + If ``ack_queue`` is set to True, this includes the ack_ids, but + also clears the internal set. + + This means that calls to :meth:`get_initial_request` with + ``ack_queue`` set to True are not idempotent. + """ + # Any ack IDs that are under lease management and not being acked + # need to have their deadline extended immediately. + ack_ids = set() + lease_ids = self.managed_ack_ids + if ack_queue: + ack_ids = self._ack_on_resume + lease_ids = lease_ids.difference(ack_ids) + + # Put the request together. + request = types.StreamingPullRequest( + ack_ids=list(ack_ids), + modify_deadline_ack_ids=list(lease_ids), + modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), + stream_ack_deadline_seconds=self.histogram.percentile(99), + subscription=self.subscription, + ) + + # Clear the ack_ids set. + # Note: If `ack_queue` is False, this just ends up being a no-op, + # since the set is just an empty set. + ack_ids.clear() + + # Return the initial request. + return request + def lease(self, ack_id, byte_size): """Add the given ack ID to lease management. @@ -167,10 +254,18 @@ def lease(self, ack_id, byte_size): ack_id (str): The ack ID. byte_size (int): The size of the PubSub message, in bytes. """ + # Add the ack ID to the set of managed ack IDs, and increment + # the size counter. 
if ack_id not in self.managed_ack_ids: self.managed_ack_ids.add(ack_id) self._bytes += byte_size + # Sanity check: Do we have too many things in our inventory? + # If we do, we need to stop the stream. + if self._load >= 1.0: + self._paused = True + self.close() + def maintain_leases(self): """Maintain all of the leases being managed by the policy. @@ -202,7 +297,7 @@ def maintain_leases(self): # it is more efficient to make a single request. ack_ids = list(self.managed_ack_ids) logger.debug('Renewing lease for %d ack IDs.' % len(ack_ids)) - if len(ack_ids) > 0: + if len(ack_ids) > 0 and self._consumer.active: request = types.StreamingPullRequest( modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=[p99] * len(ack_ids), @@ -233,13 +328,33 @@ def modify_ack_deadline(self, ack_id, seconds): ) self._consumer.send_request(request) - def nack(self, ack_id): + def nack(self, ack_id, byte_size=None): """Explicitly deny receipt of a message. Args: ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. + """ + self.modify_ack_deadline(ack_id=ack_id, seconds=0) + self.drop(ack_id=ack_id, byte_size=byte_size) + + @abc.abstractmethod + def close(self): + """Close the existing connection.""" + raise NotImplementedError + + @abc.abstractmethod + def on_exception(self, exception): + """Called when a gRPC exception occurs. + + If this method does nothing, then the stream is re-started. If this + raises an exception, it will stop the consumer thread. + This is executed on the response consumer helper thread. + + Args: + exception (Exception): The exception raised by the RPC. """ - return self.modify_ack_deadline(ack_id=ack_id, seconds=0) + raise NotImplementedError @abc.abstractmethod def on_response(self, response): @@ -262,19 +377,6 @@ def on_response(self, response): """ raise NotImplementedError - @abc.abstractmethod - def on_exception(self, exception): - """Called when a gRPC exception occurs. 
- - If this method does nothing, then the stream is re-started. If this - raises an exception, it will stop the consumer thread. - This is executed on the response consumer helper thread. - - Args: - exception (Exception): The exception raised by the RPC. - """ - raise NotImplementedError - @abc.abstractmethod def open(self, callback): """Open a streaming pull connection and begin receiving messages. diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py index f770da096a39..ec92ab38524d 100644 --- a/pubsub/google/cloud/pubsub_v1/types.py +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -44,11 +44,12 @@ # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. FlowControl = collections.namedtuple('FlowControl', - ['max_bytes', 'max_messages'], + ['max_bytes', 'max_messages', 'resume_threshold'], ) FlowControl.__new__.__defaults__ = ( psutil.virtual_memory().total * 0.2, # max_bytes: 20% of total RAM float('inf'), # max_messages: no limit + 0.8, # resume_threshold: 80% ) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 391a6db59240..a3a1e16f027e 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -57,9 +57,9 @@ def test_ack(): msg.ack() put.assert_called_once_with(('ack', { 'ack_id': 'bogus_ack_id', + 'byte_size': 25, 'time_to_ack': mock.ANY, })) - drop.assert_called_once_with() def test_drop(): @@ -93,9 +93,10 @@ def test_modify_ack_deadline(): def test_nack(): - msg = create_message(b'foo') - with mock.patch.object(message.Message, 'modify_ack_deadline') as mad: - with mock.patch.object(message.Message, 'drop') as drop: - msg.nack() - mad.assert_called_once_with(seconds=0) - drop.assert_called_once_with() + msg = create_message(b'foo', ack_id='bogus_id') + with mock.patch.object(msg._request_queue, 'put') as put: + 
msg.nack() + put.assert_called_once_with(('nack', { + 'ack_id': 'bogus_id', + 'byte_size': 25, + })) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 42f416cb50ba..b665e7898289 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -21,9 +21,9 @@ from google.cloud.pubsub_v1.subscriber.policy import thread -def create_policy(): +def create_policy(flow_control=types.FlowControl()): client = subscriber.Client() - return thread.Policy(client, 'sub_name') + return thread.Policy(client, 'sub_name', flow_control=flow_control) def test_ack_deadline(): @@ -35,9 +35,9 @@ def test_ack_deadline(): assert policy.ack_deadline == 20 -def test_initial_request(): +def test_get_initial_request(): policy = create_policy() - initial_request = policy.initial_request + initial_request = policy.get_initial_request() assert isinstance(initial_request, types.StreamingPullRequest) assert initial_request.subscription == 'sub_name' assert initial_request.stream_ack_deadline_seconds == 10 @@ -61,6 +61,7 @@ def test_subscription(): def test_ack(): policy = create_policy() + policy._consumer.active = True with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string', 20) send_request.assert_called_once_with(types.StreamingPullRequest( @@ -72,6 +73,7 @@ def test_ack(): def test_ack_no_time(): policy = create_policy() + policy._consumer.active = True with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string') send_request.assert_called_once_with(types.StreamingPullRequest( @@ -80,6 +82,14 @@ def test_ack_no_time(): assert len(policy.histogram) == 0 +def test_ack_paused(): + policy = create_policy() + policy._paused = True + policy._consumer.active = False + policy.ack('ack_id_string') + assert 'ack_id_string' in policy._ack_on_resume + + def 
test_call_rpc(): policy = create_policy() with mock.patch.object(policy._client.api, 'streaming_pull') as pull: @@ -101,6 +111,41 @@ def test_drop(): assert policy._bytes == 0 +def test_drop_below_threshold(): + """Establish that we resume a paused subscription. + + If the subscription is paused, and we drop sufficiently below + the flow control thresholds, it should resume. + """ + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy._bytes = 20 + policy._paused = True + with mock.patch.object(policy, 'open') as open_: + policy.drop(ack_id='ack_id_string', byte_size=20) + open_.assert_called_once_with(policy._callback) + assert policy._paused is False + + +def test_load(): + flow_control = types.FlowControl(max_messages=10, max_bytes=1000) + policy = create_policy(flow_control=flow_control) + + # This should mean that our messages count is at 10%, and our bytes + # are at 15%; the ._load property should return the higher (0.15). + policy.lease(ack_id='one', byte_size=150) + assert policy._load == 0.15 + + # After this message is added, the messages should be higher at 20% + # (versus 16% for bytes). + policy.lease(ack_id='two', byte_size=10) + assert policy._load == 0.2 + + # Returning a number above 100% is fine. + policy.lease(ack_id='three', byte_size=1000) + assert policy._load == 1.16 + + def test_modify_ack_deadline(): policy = create_policy() with mock.patch.object(policy._consumer, 'send_request') as send_request: @@ -153,18 +198,30 @@ def trigger_inactive(seconds): def test_lease(): policy = create_policy() - policy.lease('ack_id_string', 20) + policy.lease(ack_id='ack_id_string', byte_size=20) assert len(policy.managed_ack_ids) == 1 assert policy._bytes == 20 # Do this again to prove idempotency. 
- policy.lease('ack_id_string', 20) + policy.lease(ack_id='ack_id_string', byte_size=20) assert len(policy.managed_ack_ids) == 1 assert policy._bytes == 20 +def test_lease_above_threshold(): + flow_control = types.FlowControl(max_messages=2) + policy = create_policy(flow_control=flow_control) + with mock.patch.object(policy, 'close') as close: + policy.lease(ack_id='first_ack_id', byte_size=20) + assert close.call_count == 0 + policy.lease(ack_id='second_ack_id', byte_size=25) + close.assert_called_once_with() + + def test_nack(): policy = create_policy() with mock.patch.object(policy, 'modify_ack_deadline') as mad: - policy.nack('ack_id_string') + with mock.patch.object(policy, 'drop') as drop: + policy.nack(ack_id='ack_id_string', byte_size=10) + drop.assert_called_once_with(ack_id='ack_id_string', byte_size=10) mad.assert_called_once_with(ack_id='ack_id_string', seconds=0) From b964c11ea93003841843a89086d61d48a388fa17 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 2 Aug 2017 08:21:54 -0700 Subject: [PATCH 58/86] Start working on docs. --- docs/pubsub/publisher/api/batch.rst | 6 +++ docs/pubsub/publisher/api/client.rst | 6 +++ docs/pubsub/publisher/index.rst | 61 ++++++++++++++++++++++++++++ 3 files changed, 73 insertions(+) create mode 100644 docs/pubsub/publisher/api/batch.rst create mode 100644 docs/pubsub/publisher/api/client.rst create mode 100644 docs/pubsub/publisher/index.rst diff --git a/docs/pubsub/publisher/api/batch.rst b/docs/pubsub/publisher/api/batch.rst new file mode 100644 index 000000000000..b7556845b06a --- /dev/null +++ b/docs/pubsub/publisher/api/batch.rst @@ -0,0 +1,6 @@ +Pub/Sub Batch API +================= + +.. 
automodule:: google.cloud.pubsub_v1.publisher.batch.thread + :members: + :inherited-members: diff --git a/docs/pubsub/publisher/api/client.rst b/docs/pubsub/publisher/api/client.rst new file mode 100644 index 000000000000..9d04d4f669a7 --- /dev/null +++ b/docs/pubsub/publisher/api/client.rst @@ -0,0 +1,6 @@ +Pub/Sub Client API +================== + +.. automodule:: google.cloud.pubsub_v1.publisher.client + :members: + :inherited-members: diff --git a/docs/pubsub/publisher/index.rst b/docs/pubsub/publisher/index.rst new file mode 100644 index 000000000000..0d2a608706cb --- /dev/null +++ b/docs/pubsub/publisher/index.rst @@ -0,0 +1,61 @@ +Publishing Messages +=================== + +Publishing messages is handled through the :class:`.publisher.Client` class. +This class provides methods to create topics, and (most importantly) a +:meth:`~.pubsub_v1.publisher.Client.publish` method that publishes +messages to Pub/Sub. + +Instantiating a publishing client is straightforward: + +.. code-block:: python + + from google.cloud import pubsub
    publish_client = pubsub.publisher.Client() + + +Publish a Message +----------------- + +To publish a message, use the :meth:`~.pubsub_v1.publisher.Client.publish` +method. This method accepts two positional arguments: the topic to publish to, +and the body of the message. It also accepts arbitrary keyword arguments, +which are passed along as attributes of the message. + +The topic is passed along as a string; all topics have the canonical form of +``projects/{project_name}/topics/{topic_name}``. + +Therefore, a very basic publishing call looks like: + +.. code-block:: python + + topic = 'projects/{project}/topics/{topic}' + publish_client.publish(topic, b'This is my message.') + +.. note:: + + The message data in Pub/Sub is an opaque blob of bytes, and as such, you 
+ If you send a text string (``str`` in Python 3, ``unicode`` in Python 2), + the method will raise :exc:`TypeError`. + + The reason it works this way is because there is no reasonable guarantee + that the same language or environment is being used by the subscriber, + and so it is the responsibility of the publisher to properly encode + the payload. + +If you want to include attributes, simply add keyword arguments: + +.. code-block:: python + + topic = 'projects/{project}/topics/{topic}'' + publish_client.publish(topic, b'This is my message.', foo='bar') + + +Batching +-------- + +Whenever you publish a message, a +:class:`~.pubsub_v1.publisher.batch.thread.Batch` is automatically created. +This way, if you publish a large volume of messages, it reduces the number of +requests made to the server. From 81b37f48d6b360dacf742fe93ed235bcecceab1e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 3 Aug 2017 13:21:56 -0700 Subject: [PATCH 59/86] Subscription fixes. --- pubsub/google/cloud/pubsub_v1/subscriber/message.py | 3 ++- pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index bb85823664c0..1e97c324b2e9 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -74,7 +74,7 @@ def __init__(self, message, ack_id, request_queue): def __repr__(self): # Get an abbreviated version of the data. - abbv_data = self._data + abbv_data = self._message.data if len(abbv_data) > 50: abbv_data = abbv_data[0:50] + b'...' 
@@ -83,6 +83,7 @@ def __repr__(self): answer += ' data: {0!r}\n'.format(abbv_data) answer += ' attributes: {0!r}\n'.format(self.attributes) answer += '}' + return answer @property def attributes(self): diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 1f29f53c92f1..c03da7f81ddc 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -129,5 +129,7 @@ def on_response(self, response): """ for msg in response.received_messages: logger.debug('New message received from Pub/Sub: %r', msg) + logger.debug(self._callback) message = Message(msg.message, msg.ack_id, self._request_queue) - self._executor.submit(self._callback, message) + future = self._executor.submit(self._callback, message) + logger.debug('Result: %s' % future.result()) From 5784d4d1de3bbad0282fadcc2bd610686a6c1fdc Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 3 Aug 2017 14:03:59 -0700 Subject: [PATCH 60/86] Change batch time, add gRPC time logging. --- .../cloud/pubsub_v1/publisher/batch/thread.py | 15 ++++++++++++--- pubsub/google/cloud/pubsub_v1/types.py | 2 +- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 37664ef5ffec..ecb88a50c755 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -14,6 +14,7 @@ from __future__ import absolute_import +import logging import threading import time import uuid @@ -167,10 +168,18 @@ def _commit(self): # Begin the request to publish these messages. if len(self._messages) == 0: return + + # Make the actual GRPC request. + # Log how long the underlying request takes. 
+ start = time.time() response = self.client.api.publish( self._topic, self.messages, ) + end = time.time() + logging.getLogger().debug('gRPC Publish took {sec} seconds.'.format( + sec=end - start, + )) # We got a response from Pub/Sub; denote that we are processing. self._status = 'processing results' @@ -186,9 +195,9 @@ def _commit(self): # We are trusting that there is a 1:1 mapping, and raise an exception # if not. self._status = self.Status.SUCCESS - for message_id, fut in zip(response.message_ids, self._futures): - self.message_ids[hash(fut)] = message_id - fut._trigger() + for message_id, future in zip(response.message_ids, self._futures): + self.message_ids[hash(future)] = message_id + future._trigger() def monitor(self): """Commit this batch after sufficient time has elapsed. diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py index ec92ab38524d..33d1353abe50 100644 --- a/pubsub/google/cloud/pubsub_v1/types.py +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -34,7 +34,7 @@ ) BatchSettings.__new__.__defaults__ = ( 1024 * 1024 * 5, # max_bytes: 5 MB - 1.0, # max_latency: 1.0 seconds + 0.05, # max_latency: 0.05 seconds 1000, # max_messages: 1,000 ) From 97d8431e9b4e3f089751c996c976fe890445f88f Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 3 Aug 2017 14:17:44 -0700 Subject: [PATCH 61/86] Unit test fix. 
--- .../unit/pubsub_v1/publisher/test_publisher_client.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_consumer.py | 4 ++-- .../unit/pubsub_v1/subscriber/test_policy_base.py | 10 ++++++---- .../unit/pubsub_v1/subscriber/test_policy_thread.py | 2 +- .../pubsub_v1/subscriber/test_subscriber_client.py | 4 ++-- 5 files changed, 12 insertions(+), 10 deletions(-) diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 6ee66d636578..f10863b92d47 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -29,7 +29,7 @@ def test_init(): # batch settings object, which should have the defaults. assert isinstance(client.api, publisher_client.PublisherClient) assert client.batch_settings.max_bytes == 5 * (2 ** 20) - assert client.batch_settings.max_latency == 1.0 + assert client.batch_settings.max_latency == 0.05 assert client.batch_settings.max_messages == 1000 diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 3ab7b21e86c1..05cb646a0f02 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -27,7 +27,7 @@ def create_consumer(): client = subscriber.Client() - subscription = client.subscribe('sub_name') + subscription = client.subscribe('sub_name_e') return consumer.Consumer(policy=subscription) @@ -46,7 +46,7 @@ def test_request_generator_thread(): # The first request that comes from the request generator thread # should always be the initial request. initial_request = next(generator) - assert initial_request.subscription == 'sub_name' + assert initial_request.subscription == 'sub_name_e' assert initial_request.stream_ack_deadline_seconds == 10 # Subsequent requests correspond to items placed in the request queue. 
diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index b665e7898289..ca124bdcea31 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -23,7 +23,7 @@ def create_policy(flow_control=types.FlowControl()): client = subscriber.Client() - return thread.Policy(client, 'sub_name', flow_control=flow_control) + return thread.Policy(client, 'sub_name_d', flow_control=flow_control) def test_ack_deadline(): @@ -39,7 +39,7 @@ def test_get_initial_request(): policy = create_policy() initial_request = policy.get_initial_request() assert isinstance(initial_request, types.StreamingPullRequest) - assert initial_request.subscription == 'sub_name' + assert initial_request.subscription == 'sub_name_d' assert initial_request.stream_ack_deadline_seconds == 10 @@ -56,7 +56,7 @@ def test_managed_ack_ids(): def test_subscription(): policy = create_policy() - assert policy.subscription == 'sub_name' + assert policy.subscription == 'sub_name_d' def test_ack(): @@ -86,7 +86,9 @@ def test_ack_paused(): policy = create_policy() policy._paused = True policy._consumer.active = False - policy.ack('ack_id_string') + with mock.patch.object(policy, 'open') as open_: + policy.ack('ack_id_string') + open_.assert_called() assert 'ack_id_string' in policy._ack_on_resume diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index d87848a76d9d..e715ddc79c42 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -29,7 +29,7 @@ def create_policy(): client = subscriber.Client() - return thread.Policy(client, 'sub_name') + return thread.Policy(client, 'sub_name_c') def test_init(): diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py 
b/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 77f8b016abb6..6489de321f11 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -25,7 +25,7 @@ def test_init(): def test_subscribe(): client = subscriber.Client() - subscription = client.subscribe('sub_name') + subscription = client.subscribe('sub_name_a') assert isinstance(subscription, thread.Policy) @@ -33,6 +33,6 @@ def test_subscribe_with_callback(): client = subscriber.Client() callback = mock.Mock() with mock.patch.object(thread.Policy, 'open') as open_: - subscription = client.subscribe('sub_name', callback) + subscription = client.subscribe('sub_name_b', callback) open_.assert_called_once_with(callback) assert isinstance(subscription, thread.Policy) From cb7dc05fb7abdde95d3c99c129d00354112c219b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 4 Aug 2017 07:54:28 -0700 Subject: [PATCH 62/86] Minor RST fixes (thanks @jonparrott). --- pubsub/google/cloud/pubsub_v1/subscriber/histogram.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py index 0e3d74d68b25..09f047495896 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/histogram.py @@ -34,8 +34,8 @@ def __init__(self, data=None): """Instantiate the histogram. Args: - data (dict): The data strucure to be used to store the - underlying data. The default is an empty dictionary. + data (Mapping[str, int]): The data strucure to be used to store + the underlying data. The default is an empty dictionary. This can be set to a dictionary-like object if required (for example, if a special object is needed for concurrency reasons). @@ -129,8 +129,8 @@ def percentile(self, percent): """Return the value that is the Nth precentile in the histogram. 
Args: - percent (int|float): The precentile being sought. The default - consumer implementations use consistently use ``99``. + percent (Union[int, float]): The precentile being sought. The + default consumer implementations use consistently use ``99``. Returns: int: The value corresponding to the requested percentile. From 69944651d54b8cf5cc6aec5332409119d1ccc80a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 4 Aug 2017 08:02:28 -0700 Subject: [PATCH 63/86] Remove the ignore in .flake8. --- pubsub/.flake8 | 6 ------ pubsub/nox.py | 8 +++++--- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/pubsub/.flake8 b/pubsub/.flake8 index 712bd8afe7f4..25168dc87605 100644 --- a/pubsub/.flake8 +++ b/pubsub/.flake8 @@ -4,9 +4,3 @@ exclude = .git, *.pyc, conf.py - -ignore = - # Allow "under-indented" continuation lines. - E124, - # Allow closing parentheses to column-match the opening call. - E128 diff --git a/pubsub/nox.py b/pubsub/nox.py index f1f66ec5c214..c860e0741fe6 100644 --- a/pubsub/nox.py +++ b/pubsub/nox.py @@ -38,9 +38,10 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', '--cov-append', '--cov-report=', + session.run( + 'py.test', '--quiet', '--cov-append', '--cov-report=', '--cov=google.cloud.pubsub', '--cov=google.cloud.pubsub_v1', - '--cov-config=.coveragerc', 'tests/unit' + '--cov-config=.coveragerc', 'tests/unit', ) @@ -86,7 +87,8 @@ def lint(session): '--library-filesets', 'google', '--test-filesets', 'tests', # Temporarily allow this to fail. 
- success_codes=range(0, 100)) + success_codes=range(0, 100), + ) @nox.session From eae7e1434e79cdc5e4c6fb131ab5273460725665 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 9 Aug 2017 10:29:50 -0700 Subject: [PATCH 64/86] Set gRPC limit to 20MB + 1 --- pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py | 6 ++++++ pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py | 9 ++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index ecb88a50c755..232534739551 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -19,6 +19,8 @@ import time import uuid +from google import gax + from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.batch import base @@ -175,6 +177,10 @@ def _commit(self): response = self.client.api.publish( self._topic, self.messages, + options=gax.CallOptions(**{ + 'grpc.max_message_length': 20 * (1024 ** 2) + 1, + 'grpc.max_receive_message_length': 20 * (1024 ** 2) + 1, + }), ) end = time.time() logging.getLogger().debug('gRPC Publish took {sec} seconds.'.format( diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index dd416886b5d1..bf52f4cc604f 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -21,6 +21,8 @@ import six +from google import gax + from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import consumer from google.cloud.pubsub_v1.subscriber import histogram @@ -174,7 +176,12 @@ def call_rpc(self, request_generator): and blocks if there are no outstanding requests (until such time as there are). 
""" - return self._client.api.streaming_pull(request_generator) + return self._client.api.streaming_pull( + request_generator, + options=gax.CallOptions(**{ + 'grpc.max_receive_message_length': 20 * (1024 ** 2) + 1, + }), + ) def drop(self, ack_id, byte_size): """Remove the given ack ID from lease management. From 6afcd2a3965541bd98b89d2f0655344113f9964e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 14 Aug 2017 21:01:18 -0700 Subject: [PATCH 65/86] Suppress not-working grpc options. --- pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 232534739551..6d5ff11068a8 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -177,10 +177,10 @@ def _commit(self): response = self.client.api.publish( self._topic, self.messages, - options=gax.CallOptions(**{ - 'grpc.max_message_length': 20 * (1024 ** 2) + 1, - 'grpc.max_receive_message_length': 20 * (1024 ** 2) + 1, - }), + # options=gax.CallOptions(**{ + # 'grpc.max_message_length': 20 * (1024 ** 2) + 1, + # 'grpc.max_receive_message_length': 20 * (1024 ** 2) + 1, + # }), ) end = time.time() logging.getLogger().debug('gRPC Publish took {sec} seconds.'.format( From 8c7c30e906ed3f365ee6d94abf9a6d0f5a5c77f1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 18 Aug 2017 14:59:00 -0700 Subject: [PATCH 66/86] Narrative publishing docs. 
--- docs/index.rst | 2 +- docs/pubsub/client.rst | 6 - docs/pubsub/iam.rst | 7 - docs/pubsub/index.rst | 117 ++ docs/pubsub/message.rst | 6 - docs/pubsub/publisher/api/batch.rst | 8 +- docs/pubsub/publisher/api/client.rst | 6 +- docs/pubsub/publisher/index.rst | 85 +- docs/pubsub/snippets.py | 483 -------- docs/pubsub/subscriber/index.rst | 2 + docs/pubsub/subscription.rst | 7 - docs/pubsub/topic.rst | 7 - docs/pubsub/types.rst | 5 + docs/pubsub/usage.rst | 245 ---- .../cloud/proto/pubsub/v1/pubsub_pb2.py | 1065 +++++++++++++++-- .../cloud/proto/pubsub/v1/pubsub_pb2_grpc.py | 48 + .../cloud/pubsub_v1/publisher/client.py | 15 +- pubsub/google/cloud/pubsub_v1/types.py | 7 +- 18 files changed, 1262 insertions(+), 859 deletions(-) delete mode 100644 docs/pubsub/client.rst delete mode 100644 docs/pubsub/iam.rst create mode 100644 docs/pubsub/index.rst delete mode 100644 docs/pubsub/message.rst delete mode 100644 docs/pubsub/snippets.py create mode 100644 docs/pubsub/subscriber/index.rst delete mode 100644 docs/pubsub/subscription.rst delete mode 100644 docs/pubsub/topic.rst create mode 100644 docs/pubsub/types.rst delete mode 100644 docs/pubsub/usage.rst diff --git a/docs/index.rst b/docs/index.rst index ee47a2ac378f..d9afe5f31af4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,7 +8,7 @@ datastore/usage dns/usage language/usage - pubsub/usage + pubsub/index resource-manager/api runtimeconfig/usage spanner/usage diff --git a/docs/pubsub/client.rst b/docs/pubsub/client.rst deleted file mode 100644 index 2745c1d808ee..000000000000 --- a/docs/pubsub/client.rst +++ /dev/null @@ -1,6 +0,0 @@ -Pub/Sub Client -============== - -.. automodule:: google.cloud.pubsub.client - :members: - :show-inheritance: diff --git a/docs/pubsub/iam.rst b/docs/pubsub/iam.rst deleted file mode 100644 index 26943762605b..000000000000 --- a/docs/pubsub/iam.rst +++ /dev/null @@ -1,7 +0,0 @@ -IAM Policy -~~~~~~~~~~ - -.. 
automodule:: google.cloud.pubsub.iam - :members: - :member-order: bysource - :show-inheritance: diff --git a/docs/pubsub/index.rst b/docs/pubsub/index.rst new file mode 100644 index 000000000000..2845ff666534 --- /dev/null +++ b/docs/pubsub/index.rst @@ -0,0 +1,117 @@ +####### +Pub/Sub +####### + +`Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that +allows you to send and receive messages between independent applications. You +can leverage Cloud Pub/Sub’s flexibility to decouple systems and components +hosted on Google Cloud Platform or elsewhere on the Internet. By building on +the same technology Google uses, Cloud Pub/Sub is designed to provide “at +least once” delivery at low latency with on-demand scalability to 1 million +messages per second (and beyond). + +.. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/ + +******************************** +Authentication and Configuration +******************************** + +- For an overview of authentication in ``google-cloud-python``, + see :doc:`/core/auth`. + +- In addition to any authentication configuration, you should also set the + :envvar:`GOOGLE_CLOUD_PROJECT` environment variable for the project you'd + like to interact with. If the :envvar:`GOOGLE_CLOUD_PROJECT` environment + variable is not present, the project ID from JSON file credentials is used. + + If you are using Google App Engine or Google Compute Engine + this will be detected automatically. + +- After configuring your environment, create a + :class:`~google.cloud.pubsub_v1.PublisherClient` or + :class:`~google.cloud.pubsub_v1.SubscriberClient`. + +.. code-block:: python + + >>> from google.cloud import pubsub + >>> publisher = pubsub.PublisherClient() + >>> subscriber = pubsub.SubscriberClient() + +or pass in ``credentials`` explicitly. + +.. code-block:: python + + >>> from google.cloud import pubsub + >>> client = pubsub.PublisherClient( + ... credentials=creds, + ... 
) + +********** +Publishing +********** + +To publish data to Cloud Pub/Sub you must create a topic, and then publish +messages to it + +.. code-block:: python + + >>> import os + >>> from google.cloud import pubsub + >>> + >>> publisher = pubsub.PublisherClient() + >>> topic = 'projects/{project_id}/topics/{topic}'.format( + ... project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + ... topic='MY_TOPIC_NAME', # Set this to something appropriate. + ... ) + >>> publisher.create_topic() + >>> publisher.publish(topic, b'My first message!', spam='eggs') + +To learn more, consult the :doc:`publishing documentation `. + + +*********** +Subscribing +*********** + +To subscribe to data in Cloud Pub/Sub, you create a subscription based on +the topic, and subscribe to that. + +.. code-block:: python + + >>> import os + >>> from google.cloud import pubsub + >>> + >>> subscriber = pubsub.SubscriberClient() + >>> topic = 'projects/{project_id}/topics/{topic}'.format( + ... project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + ... topic='MY_TOPIC_NAME', # Set this to something appropriate. + ... ) + >>> subscription_name = 'projects/{project_id}/subscriptions/{sub}'.format( + ... project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + ... sub='MY_SUBSCRIPTION_NAME', # Set this to something appropriate. + ... ) + >>> subscription = subscriber.create_subscription(topic, subscription) + +The subscription is opened asychronously, and messages are processed by +use of a callback. + +.. code-block:: python + + >>> def callback(message): + ... print(message.data) + ... message.ack() + >>> subscription.open(callback) + +To learn more, consult the :doc:`subscriber documentation `. + + +************* +API Reference +************* + +.. toctree:: + :maxdepth: 3 + + publisher/index + subscriber/index + types diff --git a/docs/pubsub/message.rst b/docs/pubsub/message.rst deleted file mode 100644 index 654c607d46b3..000000000000 --- a/docs/pubsub/message.rst +++ /dev/null @@ -1,6 +0,0 @@ -Message -~~~~~~~ - -.. 
automodule:: google.cloud.pubsub.message - :members: - :show-inheritance: diff --git a/docs/pubsub/publisher/api/batch.rst b/docs/pubsub/publisher/api/batch.rst index b7556845b06a..5846d3ff9416 100644 --- a/docs/pubsub/publisher/api/batch.rst +++ b/docs/pubsub/publisher/api/batch.rst @@ -1,6 +1,8 @@ -Pub/Sub Batch API -================= +:orphan: -.. automodule:: google.cloud.pubsub_v1.client.batch.thread +Batch API +========= + +.. automodule:: google.cloud.pubsub_v1.publisher.batch.thread :members: :inherited-members: diff --git a/docs/pubsub/publisher/api/client.rst b/docs/pubsub/publisher/api/client.rst index 9d04d4f669a7..47a3aa3d5d7a 100644 --- a/docs/pubsub/publisher/api/client.rst +++ b/docs/pubsub/publisher/api/client.rst @@ -1,6 +1,6 @@ -Pub/Sub Client API -================== +Publisher Client API +==================== -.. automodule:: google.cloud.pubsub_v1.client +.. automodule:: google.cloud.pubsub_v1.publisher.client :members: :inherited-members: diff --git a/docs/pubsub/publisher/index.rst b/docs/pubsub/publisher/index.rst index 0d2a608706cb..c9cb5dd77582 100644 --- a/docs/pubsub/publisher/index.rst +++ b/docs/pubsub/publisher/index.rst @@ -1,31 +1,34 @@ Publishing Messages =================== -Publishing messages is handled through the :class:`.publisher.Client` class. -This class provides methods to create topics, and (most importantly) a -:meth:`~.pubsub_v1.publisher.Client.publish` method that publishes +Publishing messages is handled through the +:class:`~.pubsub_v1.publisher.client.Client` class (aliased as +``google.cloud.pubsub.PublisherClient``). This class provides methods to +create topics, and (most importantly) a +:meth:`~.pubsub_v1.publisher.client.Client.publish` method that publishes messages to Pub/Sub. -Instantiating a publishing client is straightforward:: +Instantiating a publishing client is straightforward: .. 
code-block:: python from google.cloud import pubsub - publish_client = pubsub.publisher.Client() + publish_client = pubsub.PublisherClient() Publish a Message ----------------- -To publish a message, use the :meth:`~.pubsub_v1.publisher.Client.publish` -method. This method accepts two positional arguments: the topic to publish to, -and the body of the message. It also accepts arbitrary keyword arguments, -which are passed along as attributes of the message. +To publish a message, use the +:meth:`~.pubsub_v1.publisher.client.Client.publish` method. This method accepts +two positional arguments: the topic to publish to, and the body of the message. +It also accepts arbitrary keyword arguments, which are passed along as +attributes of the message. The topic is passed along as a string; all topics have the canonical form of ``projects/{project_name}/topics/{topic_name}``. -Therefore, a very basic publishing call looks like:: +Therefore, a very basic publishing call looks like: .. code-block:: python @@ -59,3 +62,65 @@ Whenever you publish a message, a :class:`~.pubsub_v1.publisher.batch.thread.Batch` is automatically created. This way, if you publish a large volume of messages, it reduces the number of requests made to the server. + +The way that this works is that on the first message that you send, a new +:class:`~.pubsub_v1.publisher.batch.thread.Batch` is created automatically. +For every subsequent message, if there is already a valid batch that is still +accepting messages, then that batch is used. When the batch is created, it +begins a countdown that publishes the batch once sufficient time has +elapsed (by default, this is 0.05 seconds). + +If you need different batching settings, simply provide a +:class:`~.pubsub_v1.types.BatchSettings` object when you instantiate the +:class:`~.pubsub_v1.publisher.client.Client`: + +.. 
code-block:: python + + from google.cloud import pubsub + from google.cloud.pubsub import types + + client = pubsub.PublisherClient( + batch_settings=BatchSettings(max_messages=500), + ) + +Pub/Sub accepts a maximum of 1,000 messages in a batch, and the size of a +batch can not exceed 10 megabytes. + + +Futures +------- + +Every call to :meth:`~.pubsub_v1.publisher.client.Client.publish` will return +a class that conforms to the :class:`~concurrent.futures.Future` interface. +You can use this to ensure that the publish succeeded: + +.. code-block:: python + + # The .result() method will block until the future is complete. + # If there is an error, it will raise an exception. + future = client.publish(topic, b'My awesome message.') + message_id = future.result() + +You can also attach a callback to the future: + +.. code-block:: python + + # Callbacks receive the future as their only argument, as defined in + # the Future interface. + def callback(future): + message_id = future.result() + do_something_with(message_id) + + # The callback is added once you get the future. If you add a callback + # and the future is already done, it will simply be executed immediately. + future = client.publish(topic, b'My awesome message.') + future.add_done_callback(callback) + + +API Reference +------------- + +.. toctree:: + :maxdepth: 2 + + api/client diff --git a/docs/pubsub/snippets.py b/docs/pubsub/snippets.py deleted file mode 100644 index 96eea175c0cd..000000000000 --- a/docs/pubsub/snippets.py +++ /dev/null @@ -1,483 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Testable usage examples for Google Cloud Pubsub API wrapper - -Each example function takes a ``client`` argument (which must be an instance -of :class:`google.cloud.pubsub.client.Client`) and uses it to perform a task -with the API. - -To facilitate running the examples as system tests, each example is also passed -a ``to_delete`` list; the function adds to the list any objects created which -need to be deleted during teardown. -""" - -import time - -from google.cloud.pubsub.client import Client - - -def snippet(func): - """Mark ``func`` as a snippet example function.""" - func._snippet = True - return func - - -def _millis(): - return time.time() * 1000 - - -@snippet -def client_list_topics(client, to_delete): # pylint: disable=unused-argument - """List topics for a project.""" - - def do_something_with(sub): # pylint: disable=unused-argument - pass - - # [START client_list_topics] - for topic in client.list_topics(): # API request(s) - do_something_with(topic) - # [END client_list_topics] - - -@snippet -def client_list_subscriptions(client, - to_delete): # pylint: disable=unused-argument - """List all subscriptions for a project.""" - - def do_something_with(sub): # pylint: disable=unused-argument - pass - - # [START client_list_subscriptions] - for subscription in client.list_subscriptions(): # API request(s) - do_something_with(subscription) - # [END client_list_subscriptions] - - -@snippet -def client_topic(client, to_delete): # pylint: disable=unused-argument - """Topic factory.""" - TOPIC_NAME = 'topic_factory-%d' % (_millis(),) - - # [START 
client_topic] - topic = client.topic(TOPIC_NAME) - # [END client_topic] - - -@snippet -def client_subscription(client, to_delete): # pylint: disable=unused-argument - """Subscription factory.""" - SUBSCRIPTION_NAME = 'subscription_factory-%d' % (_millis(),) - - # [START client_subscription] - subscription = client.subscription( - SUBSCRIPTION_NAME, ack_deadline=60, - retain_acked_messages=True) - # [END client_subscription] - - -@snippet -def topic_create(client, to_delete): - """Create a topic.""" - TOPIC_NAME = 'topic_create-%d' % (_millis(),) - - # [START topic_create] - topic = client.topic(TOPIC_NAME) - topic.create() # API request - # [END topic_create] - - to_delete.append(topic) - - -@snippet -def topic_exists(client, to_delete): - """Test existence of a topic.""" - TOPIC_NAME = 'topic_exists-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - to_delete.append(topic) - - # [START topic_exists] - assert not topic.exists() # API request - topic.create() # API request - assert topic.exists() # API request - # [END topic_exists] - - -@snippet -def topic_delete(client, to_delete): # pylint: disable=unused-argument - """Delete a topic.""" - TOPIC_NAME = 'topic_delete-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() # API request - - # [START topic_delete] - assert topic.exists() # API request - topic.delete() - assert not topic.exists() # API request - # [END topic_delete] - - -@snippet -def topic_iam_policy(client, to_delete): - """Fetch / set a topic's IAM policy.""" - TOPIC_NAME = 'topic_iam_policy-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_get_iam_policy] - policy = topic.get_iam_policy() # API request - # [END topic_get_iam_policy] - - assert len(policy.viewers) == 0 - assert len(policy.editors) == 0 - assert len(policy.owners) == 0 - - # [START topic_set_iam_policy] - ALL_USERS = policy.all_users() - policy.viewers = [ALL_USERS] - LOGS_GROUP = 
policy.group('cloud-logs@google.com') - policy.editors = [LOGS_GROUP] - new_policy = topic.set_iam_policy(policy) # API request - # [END topic_set_iam_policy] - - assert ALL_USERS in new_policy.viewers - assert LOGS_GROUP in new_policy.editors - - -# @snippet # Disabled due to #1687 -def topic_check_iam_permissions(client, to_delete): - """Check topic IAM permissions.""" - TOPIC_NAME = 'topic_check_iam_permissions-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_check_iam_permissions] - from google.cloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE - TO_CHECK = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = topic.check_iam_permissions(TO_CHECK) - assert set(ALLOWED) == set(TO_CHECK) - # [END topic_check_iam_permissions] - - -@snippet -def topic_publish_messages(client, to_delete): - """Publish messages to a topic.""" - TOPIC_NAME = 'topic_publish_messages-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_publish_simple_message] - topic.publish(b'This is the message payload') # API request - # [END topic_publish_simple_message] - - # [START topic_publish_message_with_attrs] - topic.publish(b'Another message payload', extra='EXTRA') # API request - # [END topic_publish_message_with_attrs] - - -@snippet -def topic_subscription(client, to_delete): - """Create subscriptions to a topic.""" - TOPIC_NAME = 'topic_subscription-%d' % (_millis(),) - SUB_DEFAULTS = 'topic_subscription-defaults-%d' % (_millis(),) - SUB_ACK90 = 'topic_subscription-ack90-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_subscription_defaults] - sub_defaults = topic.subscription(SUB_DEFAULTS) - # [END topic_subscription_defaults] - - sub_defaults.create() # API request - to_delete.append(sub_defaults) - expected_names = set() - expected_names.add(sub_defaults.full_name) - - # [START 
topic_subscription_ack90] - sub_ack90 = topic.subscription(SUB_ACK90, ack_deadline=90) - # [END topic_subscription_ack90] - - sub_ack90.create() # API request - to_delete.append(sub_ack90) - expected_names.add(sub_ack90.full_name) - - sub_names = set() - - def do_something_with(sub): - sub_names.add(sub.full_name) - - # [START topic_list_subscriptions] - for subscription in topic.list_subscriptions(): # API request(s) - do_something_with(subscription) - # [END topic_list_subscriptions] - - assert sub_names.issuperset(expected_names) - - -# @snippet: disabled, because push-mode requires a validated endpoint URL -def topic_subscription_push(client, to_delete): - """Create subscriptions to a topic.""" - TOPIC_NAME = 'topic_subscription_push-%d' % (_millis(),) - SUB_PUSH = 'topic_subscription_push-sub-%d' % (_millis(),) - PUSH_URL = 'https://api.example.com/push-endpoint' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_subscription_push] - subscription = topic.subscription(SUB_PUSH, push_endpoint=PUSH_URL) - subscription.create() # API request - # [END topic_subscription_push] - - # [START subscription_push_pull] - subscription.modify_push_configuration(push_endpoint=None) # API request - # [END subscription_push_pull] - - # [START subscription_pull_push] - subscription.modify_push_configuration( - push_endpoint=PUSH_URL) # API request - # [END subscription_pull_push] - - -@snippet -def subscription_lifecycle(client, to_delete): - """Test lifecycle of a subscription.""" - TOPIC_NAME = 'subscription_lifecycle-%d' % (_millis(),) - SUB_NAME = 'subscription_lifecycle-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START subscription_create] - subscription = topic.subscription(SUB_NAME) - subscription.create() # API request - # [END subscription_create] - - # [START subscription_exists] - assert subscription.exists() # API request - # [END subscription_exists] - 
- # [START subscription_reload] - subscription.reload() # API request - # [END subscription_reload] - - # [START subscription_delete] - subscription.delete() # API request - # [END subscription_delete] - - -@snippet -def subscription_pull(client, to_delete): - """Pull messges from a subscribed topic.""" - TOPIC_NAME = 'subscription_pull-%d' % (_millis(),) - SUB_NAME = 'subscription_pull-defaults-%d' % (_millis(),) - PAYLOAD1 = b'PAYLOAD1' - PAYLOAD2 = b'PAYLOAD2' - EXTRA = 'EXTRA' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START subscription_pull_return_immediately] - pulled = subscription.pull(return_immediately=True) - # [END subscription_pull_return_immediately] - assert len(pulled) == 0, "unexpected message" - - topic.publish(PAYLOAD1) - topic.publish(PAYLOAD2, extra=EXTRA) - - time.sleep(1) # eventually-consistent - - # [START subscription_pull] - pulled = subscription.pull(max_messages=2) - # [END subscription_pull] - - assert len(pulled) == 2, "eventual consistency" - - # [START subscription_modify_ack_deadline] - for ack_id, _ in pulled: - subscription.modify_ack_deadline(ack_id, 90) # API request - # [END subscription_modify_ack_deadline] - - payloads = [] - extras = [] - - def do_something_with(message): # pylint: disable=unused-argument - payloads.append(message.data) - if message.attributes: - extras.append(message.attributes) - - class ApplicationException(Exception): - pass - - def log_exception(_): - pass - - # [START subscription_acknowledge] - for ack_id, message in pulled: - try: - do_something_with(message) - except ApplicationException as e: - log_exception(e) - else: - subscription.acknowledge([ack_id]) - # [END subscription_acknowledge] - - assert set(payloads) == set([PAYLOAD1, PAYLOAD2]), 'payloads: %s' % ( - (payloads,)) - assert extras == [{'extra': EXTRA}], 'extras: %s' % ( - (extras,)) - - 
-@snippet -def subscription_pull_w_autoack(client, to_delete): - """Pull messges from a topic, auto-acknowldging them""" - TOPIC_NAME = 'subscription_pull_autoack-%d' % (_millis(),) - SUB_NAME = 'subscription_pull_autoack-defaults-%d' % (_millis(),) - PAYLOAD1 = b'PAYLOAD1' - PAYLOAD2 = b'PAYLOAD2' - EXTRA = 'EXTRA' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START topic_batch] - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, extra=EXTRA) - # [END topic_batch] - - time.sleep(1) # eventually-consistent - - payloads = [] - extras = [] - - def do_something_with(message): # pylint: disable=unused-argument - payloads.append(message.data) - if message.attributes: - extras.append(message.attributes) - - # [START subscription_pull_autoack] - from google.cloud.pubsub.subscription import AutoAck - with AutoAck(subscription, max_messages=10) as ack: - for ack_id, message in list(ack.items()): - try: - do_something_with(message) - except Exception: # pylint: disable=broad-except - del ack[ack_id] - # [END subscription_pull_autoack] - - assert set(payloads) == set(PAYLOAD1, PAYLOAD1), "eventual consistency" - assert extras == [{'extra': EXTRA}], "eventual consistency" - - -@snippet -def subscription_iam_policy(client, to_delete): - """Fetch / set a subscription's IAM policy.""" - TOPIC_NAME = 'subscription_iam_policy-%d' % (_millis(),) - SUB_NAME = 'subscription_iam_policy-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START subscription_get_iam_policy] - policy = subscription.get_iam_policy() # API request - # [END subscription_get_iam_policy] - - assert len(policy.viewers) == 0 - assert len(policy.editors) == 0 - assert 
len(policy.owners) == 0 - - # [START subscription_set_iam_policy] - ALL_USERS = policy.all_users() - policy.viewers = [ALL_USERS] - LOGS_GROUP = policy.group('cloud-logs@google.com') - policy.editors = [LOGS_GROUP] - new_policy = subscription.set_iam_policy(policy) # API request - # [END subscription_set_iam_policy] - - assert ALL_USERS in new_policy.viewers - assert LOGS_GROUP in new_policy.editors - - -# @snippet # Disabled due to #1687 -def subscription_check_iam_permissions(client, to_delete): - """Check subscription IAM permissions.""" - TOPIC_NAME = 'subscription_check_iam_permissions-%d' % (_millis(),) - SUB_NAME = 'subscription_check_iam_permissions-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START subscription_check_iam_permissions] - from google.cloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE - TO_CHECK = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = subscription.check_iam_permissions(TO_CHECK) - assert set(ALLOWED) == set(TO_CHECK) - # [END subscription_check_iam_permissions] - - -def _line_no(func): - code = getattr(func, '__code__', None) or getattr(func, 'func_code') - return code.co_firstlineno - - -def _find_examples(): - funcs = [obj for obj in globals().values() - if getattr(obj, '_snippet', False)] - for func in sorted(funcs, key=_line_no): - yield func - - -def _name_and_doc(func): - return func.__name__, func.__doc__ - - -def main(): - client = Client() - for example in _find_examples(): - to_delete = [] - print('%-25s: %s' % _name_and_doc(example)) - try: - example(client, to_delete) - except AssertionError as e: - print(' FAIL: %s' % (e,)) - except Exception as e: # pylint: disable=broad-except - print(' ERROR: %r' % (e,)) - for item in to_delete: - item.delete() - - -if __name__ == '__main__': - main() diff --git a/docs/pubsub/subscriber/index.rst 
b/docs/pubsub/subscriber/index.rst new file mode 100644 index 000000000000..0c1cf4dd0b4c --- /dev/null +++ b/docs/pubsub/subscriber/index.rst @@ -0,0 +1,2 @@ +Subscribing to Messages +======================= diff --git a/docs/pubsub/subscription.rst b/docs/pubsub/subscription.rst deleted file mode 100644 index f242cb644e83..000000000000 --- a/docs/pubsub/subscription.rst +++ /dev/null @@ -1,7 +0,0 @@ -Subscriptions -~~~~~~~~~~~~~ - -.. automodule:: google.cloud.pubsub.subscription - :members: - :member-order: bysource - :show-inheritance: diff --git a/docs/pubsub/topic.rst b/docs/pubsub/topic.rst deleted file mode 100644 index 323d467a08ce..000000000000 --- a/docs/pubsub/topic.rst +++ /dev/null @@ -1,7 +0,0 @@ -Topics -~~~~~~ - -.. automodule:: google.cloud.pubsub.topic - :members: - :member-order: bysource - :show-inheritance: diff --git a/docs/pubsub/types.rst b/docs/pubsub/types.rst new file mode 100644 index 000000000000..87c987571766 --- /dev/null +++ b/docs/pubsub/types.rst @@ -0,0 +1,5 @@ +Pub/Sub Client Types +==================== + +.. automodule:: google.cloud.pubsub_v1.types + :members: diff --git a/docs/pubsub/usage.rst b/docs/pubsub/usage.rst deleted file mode 100644 index 96727e654835..000000000000 --- a/docs/pubsub/usage.rst +++ /dev/null @@ -1,245 +0,0 @@ -Pub / Sub -========= - - -.. toctree:: - :maxdepth: 2 - :hidden: - - client - topic - subscription - message - iam - -Authentication / Configuration ------------------------------- - -- Use :class:`Client ` objects to configure - your applications. - -- In addition to any authentication configuration, you should also set the - :envvar:`GOOGLE_CLOUD_PROJECT` environment variable for the project you'd like - to interact with. If you are Google App Engine or Google Compute Engine - this will be detected automatically. - -- The library now enables the ``gRPC`` transport for the pubsub API by - default, assuming that the required dependencies are installed and - importable. 
To *disable* this transport, set the - :envvar:`GOOGLE_CLOUD_DISABLE_GRPC` environment variable to a - non-empty string, e.g.: ``$ export GOOGLE_CLOUD_DISABLE_GRPC=true``. - -- :class:`Client ` objects hold both a ``project`` - and an authenticated connection to the PubSub service. - -- The authentication credentials can be implicitly determined from the - environment or directly via - :meth:`from_service_account_json ` - and - :meth:`from_service_account_p12 `. - -- After setting ``GOOGLE_APPLICATION_CREDENTIALS`` and ``GOOGLE_CLOUD_PROJECT`` - environment variables, create a :class:`Client ` - - .. code-block:: python - - >>> from google.cloud import pubsub - >>> client = pubsub.Client() - - -Manage topics for a project ---------------------------- - -List topics for the default project: - -.. literalinclude:: snippets.py - :start-after: [START client_list_topics] - :end-before: [END client_list_topics] - -Create a new topic for the default project: - -.. literalinclude:: snippets.py - :start-after: [START topic_create] - :end-before: [END topic_create] - -Check for the existence of a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_exists] - :end-before: [END topic_exists] - -Delete a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_delete] - :end-before: [END topic_delete] - -Fetch the IAM policy for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_get_iam_policy] - :end-before: [END topic_get_iam_policy] - -Update the IAM policy for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_set_iam_policy] - :end-before: [END topic_set_iam_policy] - -Test permissions allowed by the current IAM policy on a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_check_iam_permissions] - :end-before: [END topic_check_iam_permissions] - - -Publish messages to a topic ---------------------------- - -Publish a single message to a topic, without attributes: - -.. 
literalinclude:: snippets.py - :start-after: [START topic_publish_simple_message] - :end-before: [END topic_publish_simple_message] - -Publish a single message to a topic, with attributes: - -.. literalinclude:: snippets.py - :start-after: [START topic_publish_message_with_attrs] - :end-before: [END topic_publish_message_with_attrs] - -Publish a set of messages to a topic (as a single request): - -.. literalinclude:: snippets.py - :start-after: [START topic_batch] - :end-before: [END topic_batch] - -.. note:: - - The only API request happens during the ``__exit__()`` of the topic - used as a context manager, and only if the block exits without raising - an exception. - - -Manage subscriptions to topics ------------------------------- - -List all subscriptions for the default project: - -.. literalinclude:: snippets.py - :start-after: [START client_list_subscriptions] - :end-before: [END client_list_subscriptions] - -List subscriptions for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_list_subscriptions] - :end-before: [END topic_list_subscriptions] - -Create a new pull subscription for a topic, with defaults: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_defaults] - :end-before: [END topic_subscription_defaults] - -Create a new pull subscription for a topic with a non-default ACK deadline: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_ack90] - :end-before: [END topic_subscription_ack90] - -Create a new push subscription for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_push] - :end-before: [END topic_subscription_push] - -Check for the existence of a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_exists] - :end-before: [END subscription_exists] - -Convert a pull subscription to push: - -.. 
literalinclude:: snippets.py - :start-after: [START subscription_pull_push] - :end-before: [END subscription_pull_push] - -Convert a push subscription to pull: - -.. literalinclude:: snippets.py - :start-after: [START subscription_push_pull] - :end-before: [END subscription_push_pull] - -Re-synchronize a subscription with the back-end: - -.. literalinclude:: snippets.py - :start-after: [START subscription_reload] - :end-before: [END subscription_reload] - -Fetch the IAM policy for a subscription - -.. literalinclude:: snippets.py - :start-after: [START subscription_get_iam_policy] - :end-before: [END subscription_get_iam_policy] - -Update the IAM policy for a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_set_iam_policy] - :end-before: [END subscription_set_iam_policy] - -Test permissions allowed by the current IAM policy on a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_check_iam_permissions] - :end-before: [END subscription_check_iam_permissions] - -Delete a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_delete] - :end-before: [END subscription_delete] - - -Pull messages from a subscription ---------------------------------- - -Fetch pending messages for a pull subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull] - :end-before: [END subscription_pull] - -Note that received messages must be acknowledged, or else the back-end -will re-send them later: - -.. literalinclude:: snippets.py - :start-after: [START subscription_acknowledge] - :end-before: [END subscription_acknowledge] - -Fetch messages for a pull subscription without blocking (none pending): - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull_return_immediately] - :end-before: [END subscription_pull_return_immediately] - -Update the acknowlegement deadline for pulled messages: - -.. 
literalinclude:: snippets.py - :start-after: [START subscription_modify_ack_deadline] - :end-before: [END subscription_modify_ack_deadline] - -Fetch pending messages, acknowledging those whose processing doesn't raise an -error: - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull_autoack] - :end-before: [END subscription_pull_autoack] - -.. note:: - - The ``pull`` API request occurs at entry to the ``with`` block, and the - ``acknowlege`` API request occurs at the end, passing only the ``ack_ids`` - which haven't been deleted from ``ack`` diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py index 07919f8c5646..aeee99e182d0 100644 --- a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py @@ -24,7 +24,7 @@ name='google/cloud/proto/pubsub/v1/pubsub.proto', package='google.pubsub.v1', syntax='proto3', - serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x15\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 
\x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xda\x01\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"X\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 
\x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xe8\x0f\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rLis
tSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9b\x06\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') + 
serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"y\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 
\x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 
\x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xf7\x10\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rLis
tSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9a\x07\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -32,6 +32,43 @@ +_TOPIC_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Topic.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Topic.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Topic.LabelsEntry.value', index=1, + number=2, type=9, 
cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + _TOPIC = _descriptor.Descriptor( name='Topic', full_name='google.pubsub.v1.Topic', @@ -46,10 +83,17 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Topic.labels', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], - nested_types=[], + nested_types=[_TOPIC_LABELSENTRY, ], enum_types=[ ], options=None, @@ -59,7 +103,7 @@ oneofs=[ ], serialized_start=221, - serialized_end=242, + serialized_end=342, ) @@ -96,8 +140,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=415, - serialized_end=464, + serialized_start=515, + serialized_end=564, ) _PUBSUBMESSAGE = _descriptor.Descriptor( @@ -147,8 +191,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=245, - serialized_end=464, + serialized_start=345, + serialized_end=564, ) @@ -178,8 +222,46 @@ extension_ranges=[], oneofs=[ ], - serialized_start=466, - serialized_end=498, + serialized_start=566, + serialized_end=598, +) + + +_UPDATETOPICREQUEST = _descriptor.Descriptor( + name='UpdateTopicRequest', + full_name='google.pubsub.v1.UpdateTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', 
full_name='google.pubsub.v1.UpdateTopicRequest.topic', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateTopicRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=600, + serialized_end=709, ) @@ -216,8 +298,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=500, - serialized_end=582, + serialized_start=711, + serialized_end=793, ) @@ -247,8 +329,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=584, - serialized_end=622, + serialized_start=795, + serialized_end=833, ) @@ -292,8 +374,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=624, - serialized_end=699, + serialized_start=835, + serialized_end=910, ) @@ -330,8 +412,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=701, - serialized_end=787, + serialized_start=912, + serialized_end=998, ) @@ -375,8 +457,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=789, - serialized_end=874, + serialized_start=1000, + serialized_end=1085, ) @@ -413,8 +495,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=876, - serialized_end=956, + serialized_start=1087, + serialized_end=1167, ) @@ -444,11 +526,48 @@ extension_ranges=[], oneofs=[ ], - serialized_start=958, - serialized_end=993, + serialized_start=1169, + serialized_end=1204, ) +_SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Subscription.LabelsEntry', + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Subscription.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Subscription.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + _SUBSCRIPTION = _descriptor.Descriptor( name='Subscription', full_name='google.pubsub.v1.Subscription', @@ -498,10 +617,17 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Subscription.labels', index=6, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], - nested_types=[], + nested_types=[_SUBSCRIPTION_LABELSENTRY, ], enum_types=[ ], options=None, @@ -510,8 +636,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=996, - serialized_end=1214, + serialized_start=1207, + serialized_end=1532, ) @@ -548,8 +674,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=415, - serialized_end=464, + serialized_start=515, + serialized_end=564, ) 
_PUSHCONFIG = _descriptor.Descriptor( @@ -585,8 +711,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1217, - serialized_end=1369, + serialized_start=1535, + serialized_end=1687, ) @@ -623,8 +749,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1371, - serialized_end=1454, + serialized_start=1689, + serialized_end=1772, ) @@ -654,8 +780,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1456, - serialized_end=1502, + serialized_start=1774, + serialized_end=1820, ) @@ -692,8 +818,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1505, - serialized_end=1635, + serialized_start=1823, + serialized_end=1953, ) @@ -737,8 +863,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1637, - serialized_end=1719, + serialized_start=1955, + serialized_end=2037, ) @@ -775,8 +901,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1721, - serialized_end=1828, + serialized_start=2039, + serialized_end=2146, ) @@ -806,8 +932,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1830, - serialized_end=1879, + serialized_start=2148, + serialized_end=2197, ) @@ -844,8 +970,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1881, - serialized_end=1979, + serialized_start=2199, + serialized_end=2297, ) @@ -889,8 +1015,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1981, - serialized_end=2066, + serialized_start=2299, + serialized_end=2384, ) @@ -920,8 +1046,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2068, - serialized_end=2144, + serialized_start=2386, + serialized_end=2462, ) @@ -965,8 +1091,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2146, - serialized_end=2241, + serialized_start=2464, + serialized_end=2559, ) @@ -1003,8 +1129,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2243, - serialized_end=2302, + serialized_start=2561, + serialized_end=2620, ) @@ -1062,8 +1188,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2305, - serialized_end=2469, + serialized_start=2623, + 
serialized_end=2787, ) @@ -1093,8 +1219,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2471, - serialized_end=2556, + serialized_start=2789, + serialized_end=2874, ) @@ -1131,11 +1257,86 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2558, - serialized_end=2617, + serialized_start=2876, + serialized_end=2935, +) + + +_UPDATESNAPSHOTREQUEST = _descriptor.Descriptor( + name='UpdateSnapshotRequest', + full_name='google.pubsub.v1.UpdateSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.UpdateSnapshotRequest.snapshot', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateSnapshotRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2937, + serialized_end=3055, ) +_SNAPSHOT_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Snapshot.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Snapshot.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', 
full_name='google.pubsub.v1.Snapshot.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + _SNAPSHOT = _descriptor.Descriptor( name='Snapshot', full_name='google.pubsub.v1.Snapshot', @@ -1164,10 +1365,17 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Snapshot.labels', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], - nested_types=[], + nested_types=[_SNAPSHOT_LABELSENTRY, ], enum_types=[ ], options=None, @@ -1176,8 +1384,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2619, - serialized_end=2707, + serialized_start=3058, + serialized_end=3249, ) @@ -1221,8 +1429,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2709, - serialized_end=2787, + serialized_start=3251, + serialized_end=3329, ) @@ -1259,8 +1467,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2789, - serialized_end=2884, + serialized_start=3331, + serialized_end=3426, ) @@ -1290,8 +1498,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2886, - serialized_end=2927, + serialized_start=3428, + serialized_end=3469, ) @@ -1338,8 +1546,8 @@ name='target', full_name='google.pubsub.v1.SeekRequest.target', index=0, containing_type=None, fields=[]), ], - serialized_start=2929, - 
serialized_end=3038, + serialized_start=3471, + serialized_end=3580, ) @@ -1362,17 +1570,23 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3040, - serialized_end=3054, + serialized_start=3582, + serialized_end=3596, ) +_TOPIC_LABELSENTRY.containing_type = _TOPIC +_TOPIC.fields_by_name['labels'].message_type = _TOPIC_LABELSENTRY _PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE _PUBSUBMESSAGE.fields_by_name['attributes'].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY _PUBSUBMESSAGE.fields_by_name['publish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_UPDATETOPICREQUEST.fields_by_name['topic'].message_type = _TOPIC +_UPDATETOPICREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK _PUBLISHREQUEST.fields_by_name['messages'].message_type = _PUBSUBMESSAGE _LISTTOPICSRESPONSE.fields_by_name['topics'].message_type = _TOPIC +_SUBSCRIPTION_LABELSENTRY.containing_type = _SUBSCRIPTION _SUBSCRIPTION.fields_by_name['push_config'].message_type = _PUSHCONFIG _SUBSCRIPTION.fields_by_name['message_retention_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_SUBSCRIPTION.fields_by_name['labels'].message_type = _SUBSCRIPTION_LABELSENTRY _PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG _PUSHCONFIG.fields_by_name['attributes'].message_type = _PUSHCONFIG_ATTRIBUTESENTRY _RECEIVEDMESSAGE.fields_by_name['message'].message_type = _PUBSUBMESSAGE @@ -1382,7 +1596,11 @@ _MODIFYPUSHCONFIGREQUEST.fields_by_name['push_config'].message_type = _PUSHCONFIG _PULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE _STREAMINGPULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_UPDATESNAPSHOTREQUEST.fields_by_name['snapshot'].message_type = _SNAPSHOT +_UPDATESNAPSHOTREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_SNAPSHOT_LABELSENTRY.containing_type = 
_SNAPSHOT _SNAPSHOT.fields_by_name['expire_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SNAPSHOT.fields_by_name['labels'].message_type = _SNAPSHOT_LABELSENTRY _LISTSNAPSHOTSRESPONSE.fields_by_name['snapshots'].message_type = _SNAPSHOT _SEEKREQUEST.fields_by_name['time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _SEEKREQUEST.oneofs_by_name['target'].fields.append( @@ -1394,6 +1612,7 @@ DESCRIPTOR.message_types_by_name['Topic'] = _TOPIC DESCRIPTOR.message_types_by_name['PubsubMessage'] = _PUBSUBMESSAGE DESCRIPTOR.message_types_by_name['GetTopicRequest'] = _GETTOPICREQUEST +DESCRIPTOR.message_types_by_name['UpdateTopicRequest'] = _UPDATETOPICREQUEST DESCRIPTOR.message_types_by_name['PublishRequest'] = _PUBLISHREQUEST DESCRIPTOR.message_types_by_name['PublishResponse'] = _PUBLISHRESPONSE DESCRIPTOR.message_types_by_name['ListTopicsRequest'] = _LISTTOPICSREQUEST @@ -1417,6 +1636,7 @@ DESCRIPTOR.message_types_by_name['StreamingPullRequest'] = _STREAMINGPULLREQUEST DESCRIPTOR.message_types_by_name['StreamingPullResponse'] = _STREAMINGPULLRESPONSE DESCRIPTOR.message_types_by_name['CreateSnapshotRequest'] = _CREATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['UpdateSnapshotRequest'] = _UPDATESNAPSHOTREQUEST DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT DESCRIPTOR.message_types_by_name['ListSnapshotsRequest'] = _LISTSNAPSHOTSREQUEST DESCRIPTOR.message_types_by_name['ListSnapshotsResponse'] = _LISTSNAPSHOTSRESPONSE @@ -1425,11 +1645,35 @@ DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _TOPIC_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) + )) + , DESCRIPTOR = _TOPIC, __module__ = 
'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A topic resource. + + + Attributes: + name: + The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` must + start with a letter, and contain only letters (``[A-Za-z]``), + numbers (``[0-9]``), dashes (``-``), underscores (``_``), + periods (``.``), tildes (``~``), plus (``+``) or percent signs + (``%``). It must be between 3 and 255 characters in length, + and it must not start with ``"goog"``. + labels: + User labels. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) )) _sym_db.RegisterMessage(Topic) +_sym_db.RegisterMessage(Topic.LabelsEntry) PubsubMessage = _reflection.GeneratedProtocolMessageType('PubsubMessage', (_message.Message,), dict( @@ -1441,6 +1685,28 @@ , DESCRIPTOR = _PUBSUBMESSAGE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A message data and its attributes. The message payload must not be + empty; it must contain either a non-empty data field, or at least one + attribute. + + + Attributes: + data: + The message payload. + attributes: + Optional attributes for this message. + message_id: + ID of this message, assigned by the server when the message is + published. Guaranteed to be unique within the topic. This + value may be read by a subscriber that receives a + ``PubsubMessage`` via a ``Pull`` call or a push delivery. It + must not be populated by the publisher in a ``Publish`` call. + publish_time: + The time at which the message was published, populated by the + server when it receives the ``Publish`` call. It must not be + populated by the publisher in a ``Publish`` call. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) )) _sym_db.RegisterMessage(PubsubMessage) @@ -1449,13 +1715,51 @@ GetTopicRequest = _reflection.GeneratedProtocolMessageType('GetTopicRequest', (_message.Message,), dict( DESCRIPTOR = _GETTOPICREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the GetTopic method. + + + Attributes: + topic: + The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) )) _sym_db.RegisterMessage(GetTopicRequest) +UpdateTopicRequest = _reflection.GeneratedProtocolMessageType('UpdateTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATETOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateTopic method. + + + Attributes: + topic: + The topic to update. + update_mask: + Indicates which fields in the provided topic to update. Must + be specified and non-empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) + )) +_sym_db.RegisterMessage(UpdateTopicRequest) + PublishRequest = _reflection.GeneratedProtocolMessageType('PublishRequest', (_message.Message,), dict( DESCRIPTOR = _PUBLISHREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the Publish method. + + + Attributes: + topic: + The messages in the request will be published on this topic. + Format is ``projects/{project}/topics/{topic}``. + messages: + The messages to publish. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) )) _sym_db.RegisterMessage(PublishRequest) @@ -1463,6 +1767,16 @@ PublishResponse = _reflection.GeneratedProtocolMessageType('PublishResponse', (_message.Message,), dict( DESCRIPTOR = _PUBLISHRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``Publish`` method. 
+ + + Attributes: + message_ids: + The server-assigned ID of each published message, in the same + order as the messages in the request. IDs are guaranteed to be + unique within the topic. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) )) _sym_db.RegisterMessage(PublishResponse) @@ -1470,6 +1784,22 @@ ListTopicsRequest = _reflection.GeneratedProtocolMessageType('ListTopicsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopics`` method. + + + Attributes: + project: + The name of the cloud project that topics belong to. Format is + ``projects/{project}``. + page_size: + Maximum number of topics to return. + page_token: + The value returned by the last ``ListTopicsResponse``; + indicates that this is a continuation of a prior + ``ListTopics`` call, and that the system should return the + next page of data. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) )) _sym_db.RegisterMessage(ListTopicsRequest) @@ -1477,6 +1807,18 @@ ListTopicsResponse = _reflection.GeneratedProtocolMessageType('ListTopicsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopics`` method. + + + Attributes: + topics: + The resulting topics. + next_page_token: + If not empty, indicates that there may be more topics that + match the request; this value should be passed in a new + ``ListTopicsRequest``. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) )) _sym_db.RegisterMessage(ListTopicsResponse) @@ -1484,6 +1826,22 @@ ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopicSubscriptions`` method. + + + Attributes: + topic: + The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size: + Maximum number of subscription names to return. + page_token: + The value returned by the last + ``ListTopicSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListTopicSubscriptions`` call, and + that the system should return the next page of data. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) )) _sym_db.RegisterMessage(ListTopicSubscriptionsRequest) @@ -1491,6 +1849,18 @@ ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopicSubscriptions`` method. + + + Attributes: + subscriptions: + The names of the subscriptions that match the request. + next_page_token: + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListTopicSubscriptionsRequest`` to get more subscriptions. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) )) _sym_db.RegisterMessage(ListTopicSubscriptionsResponse) @@ -1498,16 +1868,88 @@ DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( DESCRIPTOR = _DELETETOPICREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``DeleteTopic`` method. + + + Attributes: + topic: + Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) )) _sym_db.RegisterMessage(DeleteTopicRequest) Subscription = _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIPTION_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) + )) + , DESCRIPTOR = _SUBSCRIPTION, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A subscription resource. + + + Attributes: + name: + The name of the subscription. It must have the format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain only + letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus + (``+``) or percent signs (``%``). It must be between 3 and 255 + characters in length, and it must not start with ``"goog"``. + topic: + The name of the topic from which this subscription is + receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this field + will be ``_deleted-topic_`` if the topic has been deleted. + push_config: + If push delivery is used with this subscription, this field is + used to configure it. 
An empty ``pushConfig`` signifies that + the subscriber will pull and ack messages using API methods. + ack_deadline_seconds: + This value is the maximum time after a subscriber receives a + message before the subscriber should acknowledge the message. + After message delivery but before the ack deadline expires and + before the message is acknowledged, it is an outstanding + message and will not be delivered again during that time (on a + best-effort basis). For pull subscriptions, this value is + used as the initial value for the ack deadline. To override + this value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using pull. The minimum + custom deadline you can specify is 10 seconds. The maximum + custom deadline you can specify is 600 seconds (10 minutes). + If this parameter is 0, a default value of 10 seconds is used. + For push delivery, this value is also used to set the request + timeout for the call to the push endpoint. If the subscriber + never acknowledges the message, the Pub/Sub system will + eventually redeliver the message. + retain_acked_messages: + Indicates whether to retain acknowledged messages. If true, + then messages are not expunged from the subscription's + backlog, even if they are acknowledged, until they fall out of + the ``message_retention_duration`` window. + message_retention_duration: + How long to retain unacknowledged messages in the + subscription's backlog, from the moment a message is + published. If ``retain_acked_messages`` is true, then this + also configures the retention of acknowledged messages, and + thus configures how far back in time a ``Seek`` can be done. + Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. + labels: + User labels. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) )) _sym_db.RegisterMessage(Subscription) +_sym_db.RegisterMessage(Subscription.LabelsEntry) PushConfig = _reflection.GeneratedProtocolMessageType('PushConfig', (_message.Message,), dict( @@ -1519,6 +1961,35 @@ , DESCRIPTOR = _PUSHCONFIG, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Configuration for a push delivery endpoint. + + + Attributes: + push_endpoint: + A URL locating the endpoint to which messages should be + pushed. For example, a Webhook endpoint might use + "https://example.com/push". + attributes: + Endpoint configuration attributes. Every endpoint has a set + of API supported attributes that can be used to control + different aspects of the message delivery. The currently + supported attribute is ``x-goog-version``, which you can use + to change the format of the pushed message. This attribute + indicates the version of the data expected by the endpoint. + This controls the shape of the pushed message (i.e., its + fields and metadata). The endpoint version is based on the + version of the Pub/Sub API. If not present during the + ``CreateSubscription`` call, it will default to the version of + the API used to make such call. If not present during a + ``ModifyPushConfig`` call, its value will not be changed. + ``GetSubscription`` calls will always return a valid version, + even if the subscription was created without this attribute. + The possible values for this attribute are: - ``v1beta1``: + uses the push format defined in the v1beta1 Pub/Sub API. - + ``v1`` or ``v1beta2``: uses the push format defined in the v1 + Pub/Sub API. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) )) _sym_db.RegisterMessage(PushConfig) @@ -1527,6 +1998,16 @@ ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), dict( DESCRIPTOR = _RECEIVEDMESSAGE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A message and its corresponding acknowledgment ID. + + + Attributes: + ack_id: + This ID can be used to acknowledge the received message. + message: + The message. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) )) _sym_db.RegisterMessage(ReceivedMessage) @@ -1534,6 +2015,15 @@ GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType('GetSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _GETSUBSCRIPTIONREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the GetSubscription method. + + + Attributes: + subscription: + The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) )) _sym_db.RegisterMessage(GetSubscriptionRequest) @@ -1541,6 +2031,17 @@ UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType('UpdateSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _UPDATESUBSCRIPTIONREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateSubscription method. + + + Attributes: + subscription: + The updated subscription object. + update_mask: + Indicates which fields in the provided subscription to update. + Must be specified and non-empty. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) )) _sym_db.RegisterMessage(UpdateSubscriptionRequest) @@ -1548,6 +2049,22 @@ ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListSubscriptionsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTSUBSCRIPTIONSREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListSubscriptions`` method. + + + Attributes: + project: + The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size: + Maximum number of subscriptions to return. + page_token: + The value returned by the last ``ListSubscriptionsResponse``; + indicates that this is a continuation of a prior + ``ListSubscriptions`` call, and that the system should return + the next page of data. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) )) _sym_db.RegisterMessage(ListSubscriptionsRequest) @@ -1555,6 +2072,18 @@ ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListSubscriptionsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTSUBSCRIPTIONSRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListSubscriptions`` method. + + + Attributes: + subscriptions: + The subscriptions that match the request. + next_page_token: + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListSubscriptionsRequest`` to get more subscriptions. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) )) _sym_db.RegisterMessage(ListSubscriptionsResponse) @@ -1562,6 +2091,15 @@ DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType('DeleteSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _DELETESUBSCRIPTIONREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the DeleteSubscription method. + + + Attributes: + subscription: + The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) )) _sym_db.RegisterMessage(DeleteSubscriptionRequest) @@ -1569,6 +2107,21 @@ ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyPushConfigRequest', (_message.Message,), dict( DESCRIPTOR = _MODIFYPUSHCONFIGREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ModifyPushConfig method. + + + Attributes: + subscription: + The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + push_config: + The push configuration for future deliveries. An empty + ``pushConfig`` indicates that the Pub/Sub system should stop + pushing messages from the given subscription and allow + messages to be pulled and acknowledged - effectively pausing + the subscription if ``Pull`` is not called. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) )) _sym_db.RegisterMessage(ModifyPushConfigRequest) @@ -1576,6 +2129,26 @@ PullRequest = _reflection.GeneratedProtocolMessageType('PullRequest', (_message.Message,), dict( DESCRIPTOR = _PULLREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``Pull`` method. + + + Attributes: + subscription: + The subscription from which messages should be pulled. Format + is ``projects/{project}/subscriptions/{sub}``. 
+ return_immediately: + If this field set to true, the system will respond immediately + even if it there are no messages available to return in the + ``Pull`` response. Otherwise, the system may wait (for a + bounded amount of time) until at least one message is + available, rather than returning no messages. The client may + cancel the request if it does not wish to wait any longer for + the response. + max_messages: + The maximum number of messages returned for this request. The + Pub/Sub system may return fewer than the number specified. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) )) _sym_db.RegisterMessage(PullRequest) @@ -1583,6 +2156,18 @@ PullResponse = _reflection.GeneratedProtocolMessageType('PullResponse', (_message.Message,), dict( DESCRIPTOR = _PULLRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``Pull`` method. + + + Attributes: + received_messages: + Received Pub/Sub messages. The Pub/Sub system will return zero + messages if there are no more available in the backlog. The + Pub/Sub system may return fewer than the ``maxMessages`` + requested even if there are more messages available in the + backlog. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) )) _sym_db.RegisterMessage(PullResponse) @@ -1590,6 +2175,26 @@ ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType('ModifyAckDeadlineRequest', (_message.Message,), dict( DESCRIPTOR = _MODIFYACKDEADLINEREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ModifyAckDeadline method. + + + Attributes: + subscription: + The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids: + List of acknowledgment IDs. + ack_deadline_seconds: + The new ack deadline with respect to the time this request was + sent to the Pub/Sub system. 
For example, if the value is 10, + the new ack deadline will expire 10 seconds after the + ``ModifyAckDeadline`` call was made. Specifying zero may + immediately make the message available for another pull + request. The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 + minutes). + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) )) _sym_db.RegisterMessage(ModifyAckDeadlineRequest) @@ -1597,6 +2202,19 @@ AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), dict( DESCRIPTOR = _ACKNOWLEDGEREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the Acknowledge method. + + + Attributes: + subscription: + The subscription whose message is being acknowledged. Format + is ``projects/{project}/subscriptions/{sub}``. + ack_ids: + The acknowledgment ID for the messages being acknowledged that + was returned by the Pub/Sub system in the ``Pull`` response. + Must not be empty. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) )) _sym_db.RegisterMessage(AcknowledgeRequest) @@ -1604,6 +2222,55 @@ StreamingPullRequest = _reflection.GeneratedProtocolMessageType('StreamingPullRequest', (_message.Message,), dict( DESCRIPTOR = _STREAMINGPULLREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``StreamingPull`` streaming RPC method. This request is + used to establish the initial stream as well as to stream + acknowledgements and ack deadline modifications from the client to the + server. + + + Attributes: + subscription: + The subscription for which to initialize the new stream. This + must be provided in the first request on the stream, and must + not be set in subsequent requests from client to server. + Format is ``projects/{project}/subscriptions/{sub}``. 
+ ack_ids: + List of acknowledgement IDs for acknowledging previously + received messages (received on this stream or a different + stream). If an ack ID has expired, the corresponding message + may be redelivered later. Acknowledging a message more than + once will not result in an error. If the acknowledgement ID is + malformed, the stream will be aborted with status + ``INVALID_ARGUMENT``. + modify_deadline_seconds: + The list of new ack deadlines for the IDs listed in + ``modify_deadline_ack_ids``. The size of this list must be the + same as the size of ``modify_deadline_ack_ids``. If it differs + the stream will be aborted with ``INVALID_ARGUMENT``. Each + element in this list is applied to the element in the same + position in ``modify_deadline_ack_ids``. The new ack deadline + is with respect to the time this request was sent to the + Pub/Sub system. Must be >= 0. For example, if the value is 10, + the new ack deadline will expire 10 seconds after this request + is received. If the value is 0, the message is immediately + made available for another streaming or non-streaming pull + request. If the value is < 0 (an error), the stream will be + aborted with status ``INVALID_ARGUMENT``. + modify_deadline_ack_ids: + List of acknowledgement IDs whose deadline will be modified + based on the corresponding element in + ``modify_deadline_seconds``. This field can be used to + indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if + the processing was interrupted. + stream_ack_deadline_seconds: + The ack deadline to use for the stream. This must be provided + in the first request on the stream, but it can also be updated + on subsequent requests from client to server. The minimum + deadline you can specify is 10 seconds. The maximum deadline + you can specify is 600 seconds (10 minutes). 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) )) _sym_db.RegisterMessage(StreamingPullRequest) @@ -1611,6 +2278,15 @@ StreamingPullResponse = _reflection.GeneratedProtocolMessageType('StreamingPullResponse', (_message.Message,), dict( DESCRIPTOR = _STREAMINGPULLRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``StreamingPull`` method. This response is used to + stream messages from the server to the client. + + + Attributes: + received_messages: + Received Pub/Sub messages. This will not be empty. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) )) _sym_db.RegisterMessage(StreamingPullResponse) @@ -1618,20 +2294,109 @@ CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict( DESCRIPTOR = _CREATESNAPSHOTREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``CreateSnapshot`` method. + + + Attributes: + name: + Optional user-provided name for this snapshot. If the name is + not provided in the request, the server will assign a random + name for this snapshot on the same project as the + subscription. Note that for REST API requests, you must + specify a name. Format is + ``projects/{project}/snapshots/{snap}``. + subscription: + The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: + (a) The existing backlog on the subscription. More precisely, + this is defined as the messages in the subscription's backlog + that are unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the successful + completion of the CreateSnapshot request. Format is + ``projects/{project}/subscriptions/{sub}``. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) )) _sym_db.RegisterMessage(CreateSnapshotRequest) +UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType('UpdateSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateSnapshot method. + + + Attributes: + snapshot: + The updated snpashot object. + update_mask: + Indicates which fields in the provided snapshot to update. + Must be specified and non-empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) + )) +_sym_db.RegisterMessage(UpdateSnapshotRequest) + Snapshot = _reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _SNAPSHOT_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) + )) + , DESCRIPTOR = _SNAPSHOT, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A snapshot resource. + + + Attributes: + name: + The name of the snapshot. + topic: + The name of the topic from which this snapshot is retaining + messages. + expire_time: + The snapshot is guaranteed to exist up until this time. A + newly-created snapshot expires no later than 7 days from the + time of its creation. Its exact lifetime is determined at + creation by the existing backlog in the source subscription. + Specifically, the lifetime of the snapshot is ``7 days - (age + of oldest unacked message in the subscription)``. For example, + consider a subscription whose oldest unacked message is 3 days + old. If a snapshot is created from this subscription, the + snapshot -- which will always capture this 3-day-old backlog + as long as the snapshot exists -- will expire in 4 days. 
+ labels: + User labels. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) )) _sym_db.RegisterMessage(Snapshot) +_sym_db.RegisterMessage(Snapshot.LabelsEntry) ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTSNAPSHOTSREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListSnapshots`` method. + + + Attributes: + project: + The name of the cloud project that snapshots belong to. Format + is ``projects/{project}``. + page_size: + Maximum number of snapshots to return. + page_token: + The value returned by the last ``ListSnapshotsResponse``; + indicates that this is a continuation of a prior + ``ListSnapshots`` call, and that the system should return the + next page of data. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) )) _sym_db.RegisterMessage(ListSnapshotsRequest) @@ -1639,6 +2404,18 @@ ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListSnapshotsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTSNAPSHOTSRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListSnapshots`` method. + + + Attributes: + snapshots: + The resulting snapshots. + next_page_token: + If not empty, indicates that there may be more snapshot that + match the request; this value should be passed in a new + ``ListSnapshotsRequest``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) )) _sym_db.RegisterMessage(ListSnapshotsResponse) @@ -1646,6 +2423,15 @@ DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType('DeleteSnapshotRequest', (_message.Message,), dict( DESCRIPTOR = _DELETESNAPSHOTREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``DeleteSnapshot`` method. + + + Attributes: + snapshot: + The name of the snapshot to delete. 
Format is + ``projects/{project}/snapshots/{snap}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) )) _sym_db.RegisterMessage(DeleteSnapshotRequest) @@ -1653,6 +2439,31 @@ SeekRequest = _reflection.GeneratedProtocolMessageType('SeekRequest', (_message.Message,), dict( DESCRIPTOR = _SEEKREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``Seek`` method. + + + Attributes: + subscription: + The subscription to affect. + time: + The time to seek to. Messages retained in the subscription + that were published before this time are marked as + acknowledged, and messages retained in the subscription that + were published after this time are marked as unacknowledged. + Note that this operation affects only those messages retained + in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). + For example, if ``time`` corresponds to a point before the + message retention window (or to a point before the system's + notion of the subscription creation time), only retained + messages will be marked as unacknowledged, and already- + expunged messages will not be restored. + snapshot: + The snapshot to seek to. The snapshot's topic must be the same + as that of the provided subscription. Format is + ``projects/{project}/snapshots/{snap}``. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) )) _sym_db.RegisterMessage(SeekRequest) @@ -1667,10 +2478,16 @@ DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1')) +_TOPIC_LABELSENTRY.has_options = True +_TOPIC_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True _PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SUBSCRIPTION_LABELSENTRY.has_options = True +_SUBSCRIPTION_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _PUSHCONFIG_ATTRIBUTESENTRY.has_options = True _PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SNAPSHOT_LABELSENTRY.has_options = True +_SNAPSHOT_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) try: # THESE ELEMENTS WILL BE DEPRECATED. # Please use the generated *_pb2_grpc.py files instead. @@ -1752,6 +2569,11 @@ def __init__(self, channel): request_serializer=CreateSnapshotRequest.SerializeToString, response_deserializer=Snapshot.FromString, ) + self.UpdateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=UpdateSnapshotRequest.SerializeToString, + response_deserializer=Snapshot.FromString, + ) self.DeleteSnapshot = channel.unary_unary( '/google.pubsub.v1.Subscriber/DeleteSnapshot', request_serializer=DeleteSnapshotRequest.SerializeToString, @@ -1795,6 +2617,10 @@ def GetSubscription(self, request, context): def UpdateSubscription(self, request, context): """Updates an existing subscription. 
Note that certain properties of a subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -1905,6 +2731,18 @@ def CreateSnapshot(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def DeleteSnapshot(self, request, context): """Removes an existing snapshot. All messages retained in the snapshot are immediately dropped. 
After a snapshot is deleted, a new one may be @@ -1986,6 +2824,11 @@ def add_SubscriberServicer_to_server(servicer, server): request_deserializer=CreateSnapshotRequest.FromString, response_serializer=Snapshot.SerializeToString, ), + 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + request_deserializer=UpdateSnapshotRequest.FromString, + response_serializer=Snapshot.SerializeToString, + ), 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( servicer.DeleteSnapshot, request_deserializer=DeleteSnapshotRequest.FromString, @@ -2018,6 +2861,11 @@ def __init__(self, channel): request_serializer=Topic.SerializeToString, response_deserializer=Topic.FromString, ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=UpdateTopicRequest.SerializeToString, + response_deserializer=Topic.FromString, + ) self.Publish = channel.unary_unary( '/google.pubsub.v1.Publisher/Publish', request_serializer=PublishRequest.SerializeToString, @@ -2057,6 +2905,18 @@ def CreateTopic(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Publish(self, request, context): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. 
The message payload must not be empty; it must contain @@ -2106,6 +2966,11 @@ def add_PublisherServicer_to_server(servicer, server): request_deserializer=Topic.FromString, response_serializer=Topic.SerializeToString, ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=UpdateTopicRequest.FromString, + response_serializer=Topic.SerializeToString, + ), 'Publish': grpc.unary_unary_rpc_method_handler( servicer.Publish, request_deserializer=PublishRequest.FromString, @@ -2166,6 +3031,10 @@ def GetSubscription(self, request, context): def UpdateSubscription(self, request, context): """Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) def ListSubscriptions(self, request, context): @@ -2246,6 +3115,15 @@ def CreateSnapshot(self, request, context): Note that for REST API requests, you must specify a name in the request. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) def DeleteSnapshot(self, request, context): """Removes an existing snapshot. All messages retained in the snapshot are immediately dropped. 
After a snapshot is deleted, a new one may be @@ -2291,6 +3169,10 @@ def GetSubscription(self, request, timeout, metadata=None, with_call=False, prot def UpdateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): """Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. """ raise NotImplementedError() UpdateSubscription.future = None @@ -2380,6 +3262,16 @@ def CreateSnapshot(self, request, timeout, metadata=None, with_call=False, proto """ raise NotImplementedError() CreateSnapshot.future = None + def UpdateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateSnapshot.future = None def DeleteSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): """Removes an existing snapshot. All messages retained in the snapshot are immediately dropped. 
After a snapshot is deleted, a new one may be @@ -2416,6 +3308,7 @@ def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_t ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.FromString, ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.FromString, ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.FromString, ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.FromString, } response_serializers = { @@ -2432,6 +3325,7 @@ def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_t ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.SerializeToString, ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.SerializeToString, ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.SerializeToString, ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.SerializeToString, } method_implementations = { @@ -2448,6 +3342,7 @@ def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_t ('google.pubsub.v1.Subscriber', 'Pull'): face_utilities.unary_unary_inline(servicer.Pull), ('google.pubsub.v1.Subscriber', 'Seek'): face_utilities.unary_unary_inline(servicer.Seek), ('google.pubsub.v1.Subscriber', 'StreamingPull'): face_utilities.stream_stream_inline(servicer.StreamingPull), + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): face_utilities.unary_unary_inline(servicer.UpdateSnapshot), ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): face_utilities.unary_unary_inline(servicer.UpdateSubscription), } server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) @@ -2474,6 
+3369,7 @@ def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, p ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.SerializeToString, ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.SerializeToString, ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.SerializeToString, ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.SerializeToString, } response_deserializers = { @@ -2490,6 +3386,7 @@ def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, p ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.FromString, ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.FromString, ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.FromString, ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.FromString, } cardinalities = { @@ -2506,6 +3403,7 @@ def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, p 'Pull': cardinality.Cardinality.UNARY_UNARY, 'Seek': cardinality.Cardinality.UNARY_UNARY, 'StreamingPull': cardinality.Cardinality.STREAM_STREAM, + 'UpdateSnapshot': cardinality.Cardinality.UNARY_UNARY, 'UpdateSubscription': cardinality.Cardinality.UNARY_UNARY, } stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) @@ -2525,6 +3423,15 @@ def CreateTopic(self, request, context): """Creates the given topic with the given name. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. 
Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) def Publish(self, request, context): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The message payload must not be empty; it must contain @@ -2567,6 +3474,16 @@ def CreateTopic(self, request, timeout, metadata=None, with_call=False, protocol """ raise NotImplementedError() CreateTopic.future = None + def UpdateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateTopic.future = None def Publish(self, request, timeout, metadata=None, with_call=False, protocol_options=None): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. 
The message payload must not be empty; it must contain @@ -2613,6 +3530,7 @@ def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_ti ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.FromString, ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.FromString, ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.FromString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.FromString, } response_serializers = { ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, @@ -2621,6 +3539,7 @@ def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_ti ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.SerializeToString, ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.SerializeToString, ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.SerializeToString, } method_implementations = { ('google.pubsub.v1.Publisher', 'CreateTopic'): face_utilities.unary_unary_inline(servicer.CreateTopic), @@ -2629,6 +3548,7 @@ def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_ti ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): face_utilities.unary_unary_inline(servicer.ListTopicSubscriptions), ('google.pubsub.v1.Publisher', 'ListTopics'): face_utilities.unary_unary_inline(servicer.ListTopics), ('google.pubsub.v1.Publisher', 'Publish'): face_utilities.unary_unary_inline(servicer.Publish), + ('google.pubsub.v1.Publisher', 'UpdateTopic'): face_utilities.unary_unary_inline(servicer.UpdateTopic), } server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) return 
beta_implementations.server(method_implementations, options=server_options) @@ -2647,6 +3567,7 @@ def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, po ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.SerializeToString, ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.SerializeToString, ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.SerializeToString, } response_deserializers = { ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, @@ -2655,6 +3576,7 @@ def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, po ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.FromString, ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.FromString, ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.FromString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.FromString, } cardinalities = { 'CreateTopic': cardinality.Cardinality.UNARY_UNARY, @@ -2663,6 +3585,7 @@ def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, po 'ListTopicSubscriptions': cardinality.Cardinality.UNARY_UNARY, 'ListTopics': cardinality.Cardinality.UNARY_UNARY, 'Publish': cardinality.Cardinality.UNARY_UNARY, + 'UpdateTopic': cardinality.Cardinality.UNARY_UNARY, } stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Publisher', cardinalities, options=stub_options) diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py index 5a970cbc77ab..06dd470470d8 100644 --- 
a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py @@ -76,6 +76,11 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, ) + self.UpdateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + ) self.DeleteSnapshot = channel.unary_unary( '/google.pubsub.v1.Subscriber/DeleteSnapshot', request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, @@ -119,6 +124,10 @@ def GetSubscription(self, request, context): def UpdateSubscription(self, request, context): """Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -229,6 +238,18 @@ def CreateSnapshot(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. 
See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def DeleteSnapshot(self, request, context): """Removes an existing snapshot. All messages retained in the snapshot are immediately dropped. After a snapshot is deleted, a new one may be @@ -310,6 +331,11 @@ def add_SubscriberServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.FromString, response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, ), + 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + ), 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( servicer.DeleteSnapshot, request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, @@ -342,6 +368,11 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) self.Publish = channel.unary_unary( '/google.pubsub.v1.Publisher/Publish', 
request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.SerializeToString, @@ -381,6 +412,18 @@ def CreateTopic(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Publish(self, request, context): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The message payload must not be empty; it must contain @@ -430,6 +473,11 @@ def add_PublisherServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), 'Publish': grpc.unary_unary_rpc_method_handler( servicer.Publish, request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.FromString, diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index c5b56063a8a2..106a91ecc689 100644 --- 
a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -39,8 +39,8 @@ class Client(object): get sensible defaults. Args: - batch_settings (~.pubsub_v1.types.BatchSettings): The settings - for batch publishing. + batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The + settings for batch publishing. batch_class (class): A class that describes how to handle batches. You may subclass the :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in @@ -76,8 +76,8 @@ def batch(self, topic, message, create=True, autocommit=True): Args: topic (str): A string representing the topic. - message (~.pubsub_v1.types.PubsubMessage): The message that will - be committed. + message (~google.cloud.pubsub_v1.types.PubsubMessage): The message + that will be committed. create (bool): Whether to create a new batch if no batch is found. Defaults to True. autocommit (bool): Whether to autocommit this batch. @@ -128,16 +128,15 @@ def publish(self, topic, data, **attrs): >>> response = client.publish(topic, data, username='guido') Args: - topic (~.pubsub_v1.types.Topic): The topic to publish - messages to. + topic (str): The topic to publish messages to. data (bytes): A bytestring representing the message body. This must be a bytestring. attrs (Mapping[str, str]): A dictionary of attributes to be sent as metadata. (These may be text strings or byte strings.) Returns: - ~.pubsub_v1.publisher.futures.Future: An object conforming - to the ``concurrent.futures.Future`` interface. + ~concurrent.futures.Future: An object conforming to the + ``concurrent.futures.Future`` interface. """ # Sanity check: Is the data being sent as a bytestring? # If it is literally anything else, complain loudly about it. 
diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py index 33d1353abe50..c28b85b53c1f 100644 --- a/pubsub/google/cloud/pubsub_v1/types.py +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -29,7 +29,8 @@ # This class is used when creating a publisher or subscriber client, and # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. -BatchSettings = collections.namedtuple('BatchSettings', +BatchSettings = collections.namedtuple( + 'BatchSettings', ['max_bytes', 'max_latency', 'max_messages'], ) BatchSettings.__new__.__defaults__ = ( @@ -43,7 +44,8 @@ # This class is used when creating a publisher or subscriber client, and # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. -FlowControl = collections.namedtuple('FlowControl', +FlowControl = collections.namedtuple( + 'FlowControl', ['max_bytes', 'max_messages', 'resume_threshold'], ) FlowControl.__new__.__defaults__ = ( @@ -60,6 +62,7 @@ names = ['BatchSettings', 'FlowControl', 'Timestamp'] for name, message in get_messages(pubsub_pb2).items(): + message.__module__ = 'google.cloud.pubsub_v1.types' setattr(sys.modules[__name__], name, message) names.append(name) From 8b502eda7f3ab540d0ca7c0a1c649cfba1f93a6f Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 07:30:08 -0700 Subject: [PATCH 67/86] Fix RST misformatting. --- docs/pubsub/publisher/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/pubsub/publisher/index.rst b/docs/pubsub/publisher/index.rst index c9cb5dd77582..7568cde43534 100644 --- a/docs/pubsub/publisher/index.rst +++ b/docs/pubsub/publisher/index.rst @@ -38,7 +38,7 @@ Therefore, a very basic publishing call looks like: .. note:: The message data in Pub/Sub is an opaque blob of bytes, and as such, you - _must_ send a ``bytes`` object in Python 3 (``str`` object in Python 2). 
+ *must* send a ``bytes`` object in Python 3 (``str`` object in Python 2). If you send a text string (``str`` in Python 3, ``unicode`` in Python 2), the method will raise :exc:`TypeError`. From 965b2ba60dd1c536f92ee7b8dba9bb8d68a1546c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 08:48:06 -0700 Subject: [PATCH 68/86] Subscriber docs, 50% complete. --- docs/pubsub/publisher/index.rst | 4 +- docs/pubsub/subscriber/api/client.rst | 6 ++ docs/pubsub/subscriber/api/message.rst | 5 ++ docs/pubsub/subscriber/api/policy.rst | 5 ++ docs/pubsub/subscriber/index.rst | 73 +++++++++++++++++++ .../gapic/pubsub/v1/subscriber_client.py | 10 +-- .../cloud/pubsub_v1/subscriber/message.py | 3 +- .../pubsub_v1/subscriber/policy/thread.py | 6 +- 8 files changed, 100 insertions(+), 12 deletions(-) create mode 100644 docs/pubsub/subscriber/api/client.rst create mode 100644 docs/pubsub/subscriber/api/message.rst create mode 100644 docs/pubsub/subscriber/api/policy.rst diff --git a/docs/pubsub/publisher/index.rst b/docs/pubsub/publisher/index.rst index 7568cde43534..72b374b588a3 100644 --- a/docs/pubsub/publisher/index.rst +++ b/docs/pubsub/publisher/index.rst @@ -32,7 +32,7 @@ Therefore, a very basic publishing call looks like: .. code-block:: python - topic = 'projects/{project}/topics/{topic}'' + topic = 'projects/{project}/topics/{topic}' publish_client.publish(topic, b'This is my message.') .. note:: @@ -51,7 +51,7 @@ If you want to include attributes, simply add keyword arguments: .. code-block:: python - topic = 'projects/{project}/topics/{topic}'' + topic = 'projects/{project}/topics/{topic}' publish_client.publish(topic, b'This is my message.', foo='bar') diff --git a/docs/pubsub/subscriber/api/client.rst b/docs/pubsub/subscriber/api/client.rst new file mode 100644 index 000000000000..965880c5a640 --- /dev/null +++ b/docs/pubsub/subscriber/api/client.rst @@ -0,0 +1,6 @@ +Subscriber Client API +===================== + +.. 
automodule:: google.cloud.pubsub_v1.subscriber.client + :members: + :inherited-members: diff --git a/docs/pubsub/subscriber/api/message.rst b/docs/pubsub/subscriber/api/message.rst new file mode 100644 index 000000000000..d6566f4c363e --- /dev/null +++ b/docs/pubsub/subscriber/api/message.rst @@ -0,0 +1,5 @@ +Messages +======== + +.. autoclass:: google.cloud.pubsub_v1.subscriber.message.Message + :members: ack, attributes, data, nack, publish_time diff --git a/docs/pubsub/subscriber/api/policy.rst b/docs/pubsub/subscriber/api/policy.rst new file mode 100644 index 000000000000..95d288d0b974 --- /dev/null +++ b/docs/pubsub/subscriber/api/policy.rst @@ -0,0 +1,5 @@ +Subscriptions +============= + +.. autoclass:: google.cloud.pubsub_v1.subscriber.policy.thread.Policy + :members: open, close diff --git a/docs/pubsub/subscriber/index.rst b/docs/pubsub/subscriber/index.rst index 0c1cf4dd0b4c..13816f051bc7 100644 --- a/docs/pubsub/subscriber/index.rst +++ b/docs/pubsub/subscriber/index.rst @@ -1,2 +1,75 @@ Subscribing to Messages ======================= + +Subscribing to messages is handled through the +:class:`~.pubsub_v1.subscriber.client.Client` class (aliased as +``google.cloud.pubsub.SubscriberClient``). This class provides a +:meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method to +attach to subscriptions on existing topics, and (most importantly) a +:meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open` method that +consumes messages from Pub/Sub. + +Instantiating a subscriber client is straightforward: + +.. code-block:: python + + from google.cloud import pubsub + subscriber = pubsub.SubscriberClient() + + +Creating a Subscription +----------------------- + +In Pub/Sub, a **subscription** is a discrete pull of messages from a topic. +If multiple clients pull the same subscription, then messages are split +between them. If multiple clients create a subscription each, then each client +will get every message. + +.. 
note:: + + Remember that Pub/Sub operates under the principle of "everything at least + once". Even in the case where multiple clients pull the same subscription, + *some* redundancy is likely. + +Creating a subscription requires that you already know what topic you want +to subscribe to, and it must already exist. Once you have that, it is easy: + +.. code-block:: python + + # Substitute {project}, {topic}, and {subscription} with appropriate + # values for your application. + topic_name = 'projects/{project}/topics/{topic}' + sub_name = 'projects/{project}/subscriptions/{subscription}' + subscriber.create_subscription(topic_name, sub_name) + + +Pulling a Subscription +---------------------- + +Once you have created a subscription (or if you already had one), the next +step is to pull data from it. This entails two steps: first you must call +:meth:`~.pubsub_v1.subscriber.client.Client.subscribe`, passing in the +subscription string. + +.. code-block:: python + + # As before, substitute {project} and {subscription} with appropriate + # values for your application. + subscription = subscriber.subscribe( + 'projects/{project}/subscriptions/{subscription}', + ) + +This will return an object with an +:meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open` method; calling +this method will actually begin consumption of the subscription. + + +Learn More +---------- + +.. toctree:: + :maxdepth: 2 + + api/client + api/policy + api/message diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py index ab8233824595..9976a25e2129 100644 --- a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py @@ -861,16 +861,14 @@ def create_snapshot(self, name, subscription, options=None): Format is ``projects/{project}/snapshots/{snap}``. subscription (string): The subscription whose backlog the snapshot retains. 
Specifically, the created snapshot is guaranteed to retain: - (a) The existing backlog on the subscription. More precisely, this is - :: + - The existing backlog on the subscription. More precisely, this is defined as the messages in the subscription's backlog that are unacknowledged upon the successful completion of the - `CreateSnapshot` request; as well as: - (b) Any messages published to the subscription's topic following the - :: - + `CreateSnapshot` request + - Any messages published to the subscription's topic following the successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. options (:class:`google.gax.CallOptions`): Overrides the default settings for this call, e.g, timeout, retries etc. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index 1e97c324b2e9..d870bf70a6e9 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -29,12 +29,13 @@ class Message(object): :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.) .. note:: + Messages in Google Cloud Pub/Sub are opaque blobs of bytes. This means that the ``data`` attribute will consistently be a :class:`bytes` object. If you want a text string, you should use :meth:`bytes.decode`. - Properties: + Attributes: message_id (str): The message ID. In general, you should not need to use this directly. data (bytes): The data in the message. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index c03da7f81ddc..4b65c7a68e94 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -31,7 +31,7 @@ class Policy(base.BasePolicy): - """A consumer class based on :class:``threading.Thread``. + """A consumer class based on :class:`threading.Thread`. 
This consumer handles the connection to the Pub/Sub service and all of the concurrency needs. @@ -45,8 +45,8 @@ def __init__(self, client, subscription, flow_control=types.FlowControl()): subscription (str): The name of the subscription. The canonical format for this is ``projects/{project}/subscriptions/{subscription}``. - flow_control (~.pubsub_v1.types.FlowControl): The flow control - settings. + flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow + control settings. """ # Default the callback to a no-op; it is provided by `.open`. self._callback = lambda message: None From 3cc1c09dd207660aca0f306119aacf4ac1700f6e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 09:23:24 -0700 Subject: [PATCH 69/86] Manual layer documentation --- docs/pubsub/index.rst | 6 ++-- docs/pubsub/subscriber/index.rst | 52 ++++++++++++++++++++++++++++++-- 2 files changed, 53 insertions(+), 5 deletions(-) diff --git a/docs/pubsub/index.rst b/docs/pubsub/index.rst index 2845ff666534..7b7438b29f9c 100644 --- a/docs/pubsub/index.rst +++ b/docs/pubsub/index.rst @@ -105,9 +105,9 @@ use of a callback. To learn more, consult the :doc:`subscriber documentation `. -************* -API Reference -************* +********** +Learn More +********** .. toctree:: :maxdepth: 3 diff --git a/docs/pubsub/subscriber/index.rst b/docs/pubsub/subscriber/index.rst index 13816f051bc7..be32a9e9ed97 100644 --- a/docs/pubsub/subscriber/index.rst +++ b/docs/pubsub/subscriber/index.rst @@ -64,8 +64,56 @@ This will return an object with an this method will actually begin consumption of the subscription. -Learn More ----------- +Subscription Callbacks +---------------------- + +Because subscriptions in this Pub/Sub client are opened asychronously, +processing the messages that are yielded by the subscription is handled +through **callbacks**. + +The basic idea: Define a function that takes one argument; this argument +will be a :class:`~.pubsub_v1.subscriber.message.Message` instance. 
This +function should do whatever processing is necessary. At the end, the +function should :meth:`~.pubsub_v1.subscriber.message.Message.ack` the +message. + +When you call :meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open`, you +must pass the callback that will be used. + +Here is an example: + +.. code-block:: python + + # Define the callback. + # Note that the callback is defined *before* the subscription is opened. + def callback(message): + do_something_with(message) # Replace this with your acutal logic. + message.ack() + + # Open the subscription, passing the callback. + subscription.open(callback) + +Explaining Ack +-------------- + +In Pub/Sub, the term **ack** stands for "acknowledge". You should ack a +message when your processing of that message *has completed*. When you ack +a message, you are telling Pub/Sub that you do not need to see it again. + +It might be tempting to ack messages immediately on receipt. While there +are valid use cases for this, in general it is unwise. The reason why: If +there is some error or edge case in your processing logic, and processing +of the message fails, you will have already told Pub/Sub that you successfully +processed the message. By contrast, if you ack only upon completion, then +Pub/Sub will eventually re-deliver the unacknowledged message. + +It is also possible to **nack** a message, which is the opposite. When you +nack, it tells Pub/Sub that you are unable or unwilling to deal with the +message, and that the service should redeliver it. + + +API Reference +------------- .. toctree:: :maxdepth: 2 From 13532b3a7d14a9154aade32a5e7b54e6138aa207 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 10:21:40 -0700 Subject: [PATCH 70/86] Doc updates. 
--- .../google/cloud/gapic/pubsub/v1/subscriber_client.py | 10 ++++++---- pubsub/google/cloud/pubsub_v1/subscriber/message.py | 3 +-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py index 9976a25e2129..ab8233824595 100644 --- a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py @@ -861,14 +861,16 @@ def create_snapshot(self, name, subscription, options=None): Format is ``projects/{project}/snapshots/{snap}``. subscription (string): The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to retain: + (a) The existing backlog on the subscription. More precisely, this is + :: - - The existing backlog on the subscription. More precisely, this is defined as the messages in the subscription's backlog that are unacknowledged upon the successful completion of the - `CreateSnapshot` request - - Any messages published to the subscription's topic following the - successful completion of the CreateSnapshot request. + `CreateSnapshot` request; as well as: + (b) Any messages published to the subscription's topic following the + :: + successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. options (:class:`google.gax.CallOptions`): Overrides the default settings for this call, e.g, timeout, retries etc. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py index d870bf70a6e9..1e97c324b2e9 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -29,13 +29,12 @@ class Message(object): :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.) .. note:: - Messages in Google Cloud Pub/Sub are opaque blobs of bytes. 
This means that the ``data`` attribute will consistently be a :class:`bytes` object. If you want a text string, you should use :meth:`bytes.decode`. - Attributes: + Properties: message_id (str): The message ID. In general, you should not need to use this directly. data (bytes): The data in the message. From f553fd6a328fa0f25a648f64b87764f81aef0f26 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 11:23:22 -0700 Subject: [PATCH 71/86] Add better Batch docstring. https://github.com/GoogleCloudPlatform/google-cloud-python/pull/3463#discussion_r124106225 --- .../google/cloud/pubsub_v1/publisher/batch/base.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index 263c18e56a80..17662df6dfe7 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -32,6 +32,19 @@ class BaseBatch(object): This class defines the interface for the Batch implementation; subclasses may be passed as the ``batch_class`` argument to :class:`~.pubsub_v1.client.PublisherClient`. + + The batching behavior works like this: When the + :class:`~.pubsub_v1.publisher.client.Client` is asked to publish a new + message, it requires a batch. The client will see if there is an + already-opened batch for the given topic; if there is, then the message + is sent to that batch. If there is not, then a new batch is created + and the message put there. + + When a new batch is created, it automatically starts a timer counting + down to the maximum latency before the batch should commit. + Essentially, if enough time passes, the batch automatically commits + regardless of how much is in it. However, if either the message count or + size thresholds are encountered first, then the batch will commit early. 
""" def __len__(self): """Return the number of messages currently in the batch.""" From 356749a3ae414d3d93f719edff35136b4954b693 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 11:25:16 -0700 Subject: [PATCH 72/86] Improve the max latency thread comments. --- pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 37664ef5ffec..c263d1b79f49 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -70,11 +70,9 @@ def __init__(self, client, topic, settings, autocommit=True): self._topic = topic self.message_ids = {} - # This is purely internal tracking. + # If max latency is specified, start a thread to monitor the batch and + # commit when the max latency is reached. self._thread = None - - # Continually monitor the thread until it is time to commit the - # batch, or the batch is explicitly committed. if autocommit and self._settings.max_latency < float('inf'): self._thread = threading.Thread(target=self.monitor) self._thread.start() From 8242c9de3ae7b932a9d271450f621fac73fd5b2e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 11:26:19 -0700 Subject: [PATCH 73/86] Collapse property docstrings. --- .../google/cloud/pubsub_v1/publisher/batch/thread.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index c263d1b79f49..d65d77daec2b 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -79,20 +79,12 @@ def __init__(self, client, topic, settings, autocommit=True): @property def client(self): - """Return the client used to create this batch. 
- - Returns: - ~.pubsub_v1.client.PublisherClient: A publisher client. - """ + """~.pubsub_v1.client.PublisherClient: A publisher client.""" return self._client @property def messages(self): - """Return the messages currently in the batch. - - Returns: - Sequence: The messages currently in the batch. - """ + """Sequence: The messages currently in the batch.""" return self._messages @property From db87dab3de63e967bd8cb81c36691fa920c9ef2d Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 11:27:33 -0700 Subject: [PATCH 74/86] More @jonparrott feedback. --- .../google/cloud/pubsub_v1/publisher/batch/thread.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index d65d77daec2b..48621531f619 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -154,9 +154,11 @@ def _commit(self): # Update the status. self._status = 'in-flight' - # Begin the request to publish these messages. + # Sanity check: If there are no messages, no-op. if len(self._messages) == 0: return + + # Begin the request to publish these messages. response = self.client.api.publish( self._topic, self.messages, @@ -176,9 +178,9 @@ def _commit(self): # We are trusting that there is a 1:1 mapping, and raise an exception # if not. self._status = self.Status.SUCCESS - for message_id, fut in zip(response.message_ids, self._futures): - self.message_ids[hash(fut)] = message_id - fut._trigger() + for message_id, future in zip(response.message_ids, self._futures): + self.message_ids[hash(future)] = message_id + future._trigger() def monitor(self): """Commit this batch after sufficient time has elapsed. 
From 58072b8eab2b1ceb120ee4689b348820448b6322 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 12:44:44 -0700 Subject: [PATCH 75/86] Remove the client as a public item in the base batch. Also rename base.BaseBatch to base.Batch. --- .../cloud/pubsub_v1/publisher/batch/base.py | 22 +------------------ .../cloud/pubsub_v1/publisher/batch/thread.py | 2 +- 2 files changed, 2 insertions(+), 22 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index 17662df6dfe7..be7097d5f236 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -21,7 +21,7 @@ @six.add_metaclass(abc.ABCMeta) -class BaseBatch(object): +class Batch(object): """The base batching class for Pub/Sub publishing. Although the :class:`~.pubsub_v1.publisher.batch.thread.Batch` class, based @@ -50,26 +50,6 @@ def __len__(self): """Return the number of messages currently in the batch.""" return len(self.messages) - @property - @abc.abstractmethod - def client(self): - """Return the client used to create this batch. - - Returns: - ~.pubsub_v1.client.PublisherClient: A publisher client. - """ - raise NotImplementedError - - @property - @abc.abstractmethod - def client(self): - """Return the client used to create this batch. - - Returns: - ~.pubsub_v1.client.PublisherClient: A publisher client. - """ - raise NotImplementedError - @property @abc.abstractmethod def messages(self): diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 48621531f619..23348abab0c1 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -23,7 +23,7 @@ from google.cloud.pubsub_v1.publisher.batch import base -class Batch(base.BaseBatch): +class Batch(base.Batch): """A batch of messages. 
The batch is the internal group of messages which are either awaiting From 8f6748857e21dbd6f961210259a52e0a4f10913b Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 12:50:03 -0700 Subject: [PATCH 76/86] Remove the rejection batch. --- .../cloud/pubsub_v1/publisher/batch/base.py | 19 ------------------- .../cloud/pubsub_v1/publisher/client.py | 13 +++++-------- 2 files changed, 5 insertions(+), 27 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index be7097d5f236..68dc9c2850ec 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -15,7 +15,6 @@ from __future__ import absolute_import import abc -import collections import six @@ -144,21 +143,3 @@ class Status(object): ACCEPTING_MESSAGES = 'accepting messages' ERROR = 'error' SUCCESS = 'success' - - -class RejectionBatch(object): - """A fake batch-like object that refuses to accept any message. - - This is used by the client to do single-op checks for batch - existence. - """ - def will_accept(self, message): - """Return False. - - Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. - - Returns: - bool: Whether this batch can accept the message. It never can. 
- """ - return False diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index c5b56063a8a2..010ee237e0bf 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -23,7 +23,6 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.publisher.batch import base from google.cloud.pubsub_v1.publisher.batch import thread @@ -65,10 +64,6 @@ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): self._batch_class = batch_class self._batches = {} - # Instantiate the "rejection batch", which is used for single-op - # acceptance checks if no batch is present. - self._rejection = base.RejectionBatch() - def batch(self, topic, message, create=True, autocommit=True): """Return the current batch for the provided topic. @@ -88,18 +83,20 @@ def batch(self, topic, message, create=True, autocommit=True): """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. - if not self._batches.get(topic, self._rejection).will_accept(message): + batch = self._batches.get(topic, None) + if not batch or not batch.will_accept(message): if not create: return None - self._batches[topic] = self._batch_class( + batch = self._batch_class( autocommit=autocommit, client=self, settings=self.batch_settings, topic=topic, ) + self._batches[topic] = batch # Simply return the appropriate batch. - return self._batches[topic] + return batch def publish(self, topic, data, **attrs): """Publish a single message. From a86d9b72547c67df3f942f62504a16bee7cad670 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 12:54:12 -0700 Subject: [PATCH 77/86] Lock batch acquisition. 
--- .../cloud/pubsub_v1/publisher/client.py | 25 +++++++++++-------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 010ee237e0bf..f3e71062caa7 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -16,6 +16,7 @@ import copy import pkg_resources +import threading import six @@ -62,6 +63,7 @@ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): # The batches on the publisher client are responsible for holding # messages. One batch exists for each topic. self._batch_class = batch_class + self._batch_lock = threading.Lock() self._batches = {} def batch(self, topic, message, create=True, autocommit=True): @@ -83,17 +85,18 @@ def batch(self, topic, message, create=True, autocommit=True): """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. - batch = self._batches.get(topic, None) - if not batch or not batch.will_accept(message): - if not create: - return None - batch = self._batch_class( - autocommit=autocommit, - client=self, - settings=self.batch_settings, - topic=topic, - ) - self._batches[topic] = batch + with self._batch_lock: + batch = self._batches.get(topic, None) + if not batch or not batch.will_accept(message): + if not create: + return None + batch = self._batch_class( + autocommit=autocommit, + client=self, + settings=self.batch_settings, + topic=topic, + ) + self._batches[topic] = batch # Simply return the appropriate batch. return batch From df18615c26ba61b5c7dff6915971fb27ebaae06f Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 12:59:21 -0700 Subject: [PATCH 78/86] Alter exception superclass. 
--- pubsub/google/cloud/pubsub_v1/publisher/exceptions.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index bedc5d5a2a48..a78a692f748c 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -12,9 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -class PublishError(RuntimeError): +from google.api.core.exceptions import GoogleAPICallError +from google.api.core.exceptions import GoogleAPIError + + +class PublishError(GoogleAPICallError): pass -class TimeoutError(RuntimeError): +class TimeoutError(GoogleAPIError): pass From 5f0549b02da28f55e681c7471d6a1d4c44b1ce05 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 13:06:25 -0700 Subject: [PATCH 79/86] Inherit from google.api.core.future.Future. --- pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 23348abab0c1..5bfce2515b12 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -18,6 +18,7 @@ import time import uuid +import google.api.core.future from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.batch import base @@ -236,7 +237,7 @@ def publish(self, message): return f -class Future(object): +class Future(google.api.core.future.Future): """Encapsulation of the asynchronous execution of an action. 
This object is returned from asychronous Pub/Sub calls, and is the From 101d9ca9c73db80f29c63745fa79975122234f3e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 13:28:28 -0700 Subject: [PATCH 80/86] Move to @jonparrott's Future interface. --- .../cloud/pubsub_v1/publisher/batch/thread.py | 77 +++++++++++-------- 1 file changed, 46 insertions(+), 31 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 5bfce2515b12..c5b4cd4346f7 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -16,7 +16,6 @@ import threading import time -import uuid import google.api.core.future from google.cloud.pubsub_v1 import types @@ -69,7 +68,6 @@ def __init__(self, client, topic, settings, autocommit=True): self._settings = settings self._status = self.Status.ACCEPTING_MESSAGES self._topic = topic - self.message_ids = {} # If max latency is specified, start a thread to monitor the batch and # commit when the max latency is reached. @@ -171,17 +169,18 @@ def _commit(self): # Sanity check: If the number of message IDs is not equal to the # number of futures I have, then something went wrong. if len(response.message_ids) != len(self._futures): - raise exceptions.PublishError( - 'Some messages were not successfully published.', - ) + for future in self._futures: + future.set_exception(exceptions.PublishError( + 'Some messages were not successfully published.', + )) + return # Iterate over the futures on the queue and return the response IDs. # We are trusting that there is a 1:1 mapping, and raise an exception # if not. self._status = self.Status.SUCCESS for message_id, future in zip(response.message_ids, self._futures): - self.message_ids[hash(future)] = message_id - future._trigger() + future.set_result(message_id) def monitor(self): """Commit this batch after sufficient time has elapsed. 
@@ -216,8 +215,8 @@ def publish(self, message): message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. Returns: - ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the - :class:`concurrent.futures.Future` interface. + ~.pubsub_v1.publisher.batch.thread.Future: An object conforming to + the :class:`concurrent.futures.Future` interface. """ # Coerce the type, just in case. if not isinstance(message, types.PubsubMessage): @@ -232,7 +231,7 @@ def publish(self, message): # Return a Future. That future needs to be aware of the status # of this batch. - f = Future(self) + f = Future() self._futures.append(f) return f @@ -245,18 +244,9 @@ class Future(google.api.core.future.Future): This object should not be created directly, but is returned by other methods in this library. - - Args: - batch (`~.Batch`): The batch object that is committing - this message. """ - def __init__(self, batch): - self._batch = batch + def __init__(self): self._callbacks = [] - self._hash = hash(uuid.uuid4()) - - def __hash__(self): - return self._hash def cancel(self): """Publishes in Pub/Sub currently may not be canceled. @@ -285,10 +275,7 @@ def done(self): This still returns True in failure cases; checking :meth:`result` or :meth:`exception` is the canonical way to assess success or failure. """ - return self._batch.status in ( - self._batch.Status.SUCCESS, - self._batch.Status.ERROR, - ) + return self._exception is not None or self._result is not None def result(self, timeout=None): """Return the message ID, or raise an exception. @@ -297,7 +284,7 @@ def result(self, timeout=None): returns the message ID. Args: - timeout (int|float): The number of seconds before this call + timeout (Union[int, float]): The number of seconds before this call times out and raises TimeoutError. Returns: @@ -313,7 +300,7 @@ def result(self, timeout=None): # return an appropriate value. 
err = self.exception(timeout=timeout) if err is None: - return self._batch.message_ids[hash(self)] + return self._result raise err def exception(self, timeout=None, _wait=1): @@ -323,7 +310,7 @@ def exception(self, timeout=None, _wait=1): returns the exception. If the call succeeded, return None. Args: - timeout (int|float): The number of seconds before this call + timeout (Union[int, float]): The number of seconds before this call times out and raises TimeoutError. Raises: @@ -337,16 +324,16 @@ def exception(self, timeout=None, _wait=1): timeout = float('inf') # If the batch completed successfully, this should return None. - if self._batch.status == 'success': + if self._result is not None: return None # If this batch had an error, this should return it. - if self._batch.status == 'error': - return self._batch.error + if self._exception is not None: + return self._exception # If the timeout has been exceeded, raise TimeoutError. if timeout <= 0: - raise exceptions.TimeoutError('Timed out waiting for exception.') + raise exceptions.TimeoutError('Timed out waiting for result.') # Wait a little while and try again. time.sleep(_wait) @@ -365,6 +352,34 @@ def add_done_callback(self, fn): fn(self) self._callbacks.append(fn) + def set_result(self, result): + """Set the result of the future to the provided result. + + Args: + result (str): The message ID. + """ + # Sanity check: A future can only complete once. + if self._result is not None or self._exception is not None: + raise RuntimeError('set_result can only be called once.') + + # Set the result and trigger the future. + self._result = result + self._trigger() + + def set_exception(self, exception): + """Set the result of the future to the given exception. + + Args: + exception (:exc:`Exception`): The exception raised. + """ + # Sanity check: A future can only complete once. 
+ if self._result is not None or self._exception is not None: + raise RuntimeError('set_exception can only be called once.') + + # Set the exception and trigger the future. + self._exception = exception + self._trigger() + def _trigger(self): """Trigger all callbacks registered to this Future. From f196b5e3815f98e24a962ec3b0bd82a75538a1ef Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 13:41:23 -0700 Subject: [PATCH 81/86] Fix some tests to match new futures. --- .../publisher/batch/test_thread_future.py | 100 +++++++----------- 1 file changed, 37 insertions(+), 63 deletions(-) diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py index ee4014ee3691..870c254c68c8 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py @@ -18,89 +18,51 @@ import pytest -from google.cloud.pubsub_v1 import publisher -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions -from google.cloud.pubsub_v1.publisher.batch.thread import Batch from google.cloud.pubsub_v1.publisher.batch.thread import Future -def create_batch(status=None): - """Create a batch object, which does not commit. - - Args: - status (str): If provided, the batch's internal status will be set - to the provided status. - - Returns: - ~.pubsub_v1.publisher.batch.thread.Batch: The batch object - """ - client = publisher.Client() - batch_settings = types.BatchSettings() - batch = Batch(client, 'topic_name', batch_settings, autocommit=False) - if status: - batch._status = status - return batch - - -def create_future(batch=None): - """Create a Future object to test. - - Args: - ~.pubsub_v1.publisher.batch.thread.Batch: A batch object, such - as one returned from :meth:`create_batch`. If none is provided, - a batch will be automatically created. 
- - Returns: - ~.pubsub_v1.publisher.batch.thread.Future: The Future object (the - class being tested in this module). - """ - if batch is None: - batch = create_batch() - return Future(batch=batch) - - def test_cancel(): - assert create_future().cancel() is False + assert Future().cancel() is False def test_cancelled(): - assert create_future().cancelled() is False + assert Future().cancelled() is False def test_running(): - assert create_future().running() is True + assert Future().running() is True def test_done(): - batch = create_batch() - future = create_future(batch=batch) + future = Future() assert future.done() is False - batch._status = batch.Status.SUCCESS - assert future._batch.status == 'success' + future.set_result('12345') assert future.done() is True def test_exception_no_error(): - batch = create_batch(status='success') - future = create_future(batch=batch) + future = Future() + future.set_result('12345') assert future.exception() is None def test_exception_with_error(): - batch = create_batch(status='error') - batch.error = RuntimeError('Something really bad happened.') - future = create_future(batch=batch) + future = Future() + error = RuntimeError('Something really bad happened.') + future.set_exception(error) # Make sure that the exception that is returned is the batch's error. # Also check the type to ensure the batch's error did not somehow # change internally. 
- assert future.exception() is batch.error + assert future.exception() is error assert isinstance(future.exception(), RuntimeError) + with pytest.raises(RuntimeError): + future.result() def test_exception_timeout(): - future = create_future() + future = Future() with mock.patch.object(time, 'sleep') as sleep: with pytest.raises(exceptions.TimeoutError): future.exception(timeout=10) @@ -113,22 +75,20 @@ def test_exception_timeout(): def test_result_no_error(): - batch = create_batch(status='success') - future = create_future(batch=batch) - batch.message_ids[hash(future)] = '42' + future = Future() + future.set_result('42') assert future.result() == '42' def test_result_with_error(): - batch = create_batch(status='error') - batch.error = RuntimeError('Something really bad happened.') - future = create_future(batch=batch) + future = Future() + future.set_exception(RuntimeError('Something really bad happened.')) with pytest.raises(RuntimeError): future.result() def test_add_done_callback_pending_batch(): - future = create_future() + future = Future() callback = mock.Mock() future.add_done_callback(callback) assert len(future._callbacks) == 1 @@ -137,17 +97,31 @@ def test_add_done_callback_pending_batch(): def test_add_done_callback_completed_batch(): - batch = create_batch(status='success') - future = create_future(batch=batch) + future = Future() + future.set_result('12345') callback = mock.Mock(spec=()) future.add_done_callback(callback) callback.assert_called_once_with(future) def test_trigger(): - future = create_future() + future = Future() callback = mock.Mock(spec=()) future.add_done_callback(callback) assert callback.call_count == 0 - future._trigger() + future.set_result('12345') callback.assert_called_once_with(future) + + +def test_set_result_once_only(): + future = Future() + future.set_result('12345') + with pytest.raises(RuntimeError): + future.set_result('67890') + + +def test_set_exception_once_only(): + future = Future() + 
future.set_exception(ValueError('wah wah')) + with pytest.raises(RuntimeError): + future.set_exception(TypeError('other wah wah')) From e6e58bb45e680cd6b8c1a3a1e035e389c524f72a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 13:45:15 -0700 Subject: [PATCH 82/86] Move Future off into its own module. --- .../cloud/pubsub_v1/publisher/batch/thread.py | 163 +--------------- .../cloud/pubsub_v1/publisher/futures.py | 179 ++++++++++++++++++ 2 files changed, 182 insertions(+), 160 deletions(-) create mode 100644 pubsub/google/cloud/pubsub_v1/publisher/futures.py diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index c5b4cd4346f7..6a2c83f2fcf2 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -17,9 +17,9 @@ import threading import time -import google.api.core.future from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher.batch import base @@ -215,7 +215,7 @@ def publish(self, message): message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. Returns: - ~.pubsub_v1.publisher.batch.thread.Future: An object conforming to + ~.pubsub_v1.publisher.futures.Future: An object conforming to the :class:`concurrent.futures.Future` interface. """ # Coerce the type, just in case. @@ -231,163 +231,6 @@ def publish(self, message): # Return a Future. That future needs to be aware of the status # of this batch. - f = Future() + f = futures.Future() self._futures.append(f) return f - - -class Future(google.api.core.future.Future): - """Encapsulation of the asynchronous execution of an action. - - This object is returned from asychronous Pub/Sub calls, and is the - interface to determine the status of those calls. 
- - This object should not be created directly, but is returned by other - methods in this library. - """ - def __init__(self): - self._callbacks = [] - - def cancel(self): - """Publishes in Pub/Sub currently may not be canceled. - - This method always returns False. - """ - return False - - def cancelled(self): - """Publishes in Pub/Sub currently may not be canceled. - - This method always returns False. - """ - return False - - def running(self): - """Publishes in Pub/Sub currently may not be canceled. - - This method always returns True. - """ - return True - - def done(self): - """Return True if the publish has completed, False otherwise. - - This still returns True in failure cases; checking :meth:`result` or - :meth:`exception` is the canonical way to assess success or failure. - """ - return self._exception is not None or self._result is not None - - def result(self, timeout=None): - """Return the message ID, or raise an exception. - - This blocks until the message has successfully been published, and - returns the message ID. - - Args: - timeout (Union[int, float]): The number of seconds before this call - times out and raises TimeoutError. - - Returns: - str: The message ID. - - Raises: - ~.pubsub_v1.TimeoutError: If the request times out. - Exception: For undefined exceptions in the underlying - call execution. - """ - # Attempt to get the exception if there is one. - # If there is not one, then we know everything worked, and we can - # return an appropriate value. - err = self.exception(timeout=timeout) - if err is None: - return self._result - raise err - - def exception(self, timeout=None, _wait=1): - """Return the exception raised by the call, if any. - - This blocks until the message has successfully been published, and - returns the exception. If the call succeeded, return None. - - Args: - timeout (Union[int, float]): The number of seconds before this call - times out and raises TimeoutError. - - Raises: - TimeoutError: If the request times out. 
- - Returns: - Exception: The exception raised by the call, if any. - """ - # If no timeout was specified, use inf. - if timeout is None: - timeout = float('inf') - - # If the batch completed successfully, this should return None. - if self._result is not None: - return None - - # If this batch had an error, this should return it. - if self._exception is not None: - return self._exception - - # If the timeout has been exceeded, raise TimeoutError. - if timeout <= 0: - raise exceptions.TimeoutError('Timed out waiting for result.') - - # Wait a little while and try again. - time.sleep(_wait) - return self.exception( - timeout=timeout - _wait, - _wait=min(_wait * 2, timeout, 60), - ) - - def add_done_callback(self, fn): - """Attach the provided callable to the future. - - The provided function is called, with this future as its only argument, - when the future finishes running. - """ - if self.done(): - fn(self) - self._callbacks.append(fn) - - def set_result(self, result): - """Set the result of the future to the provided result. - - Args: - result (str): The message ID. - """ - # Sanity check: A future can only complete once. - if self._result is not None or self._exception is not None: - raise RuntimeError('set_result can only be called once.') - - # Set the result and trigger the future. - self._result = result - self._trigger() - - def set_exception(self, exception): - """Set the result of the future to the given exception. - - Args: - exception (:exc:`Exception`): The exception raised. - """ - # Sanity check: A future can only complete once. - if self._result is not None or self._exception is not None: - raise RuntimeError('set_exception can only be called once.') - - # Set the exception and trigger the future. - self._exception = exception - self._trigger() - - def _trigger(self): - """Trigger all callbacks registered to this Future. - - This method is called internally by the batch once the batch - completes. 
- - Args: - message_id (str): The message ID, as a string. - """ - for callback in self._callbacks: - callback(self) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/futures.py b/pubsub/google/cloud/pubsub_v1/publisher/futures.py new file mode 100644 index 000000000000..2fb57b1c518b --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -0,0 +1,179 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import time + +import google.api.core.future +from google.cloud.pubsub_v1.publisher import exceptions + + +class Future(google.api.core.future.Future): + """Encapsulation of the asynchronous execution of an action. + + This object is returned from asychronous Pub/Sub calls, and is the + interface to determine the status of those calls. + + This object should not be created directly, but is returned by other + methods in this library. + """ + def __init__(self): + self._callbacks = [] + self._result = None + self._exception = None + + def cancel(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns False. + """ + return False + + def cancelled(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns False. + """ + return False + + def running(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns True. 
+ """ + return True + + def done(self): + """Return True if the publish has completed, False otherwise. + + This still returns True in failure cases; checking :meth:`result` or + :meth:`exception` is the canonical way to assess success or failure. + """ + return self._exception is not None or self._result is not None + + def result(self, timeout=None): + """Return the message ID, or raise an exception. + + This blocks until the message has successfully been published, and + returns the message ID. + + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Returns: + str: The message ID. + + Raises: + ~.pubsub_v1.TimeoutError: If the request times out. + Exception: For undefined exceptions in the underlying + call execution. + """ + # Attempt to get the exception if there is one. + # If there is not one, then we know everything worked, and we can + # return an appropriate value. + err = self.exception(timeout=timeout) + if err is None: + return self._result + raise err + + def exception(self, timeout=None, _wait=1): + """Return the exception raised by the call, if any. + + This blocks until the message has successfully been published, and + returns the exception. If the call succeeded, return None. + + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Raises: + TimeoutError: If the request times out. + + Returns: + Exception: The exception raised by the call, if any. + """ + # If no timeout was specified, use inf. + if timeout is None: + timeout = float('inf') + + # If the batch completed successfully, this should return None. + if self._result is not None: + return None + + # If this batch had an error, this should return it. + if self._exception is not None: + return self._exception + + # If the timeout has been exceeded, raise TimeoutError. 
+ if timeout <= 0: + raise exceptions.TimeoutError('Timed out waiting for result.') + + # Wait a little while and try again. + time.sleep(_wait) + return self.exception( + timeout=timeout - _wait, + _wait=min(_wait * 2, timeout, 60), + ) + + def add_done_callback(self, fn): + """Attach the provided callable to the future. + + The provided function is called, with this future as its only argument, + when the future finishes running. + """ + if self.done(): + fn(self) + self._callbacks.append(fn) + + def set_result(self, result): + """Set the result of the future to the provided result. + + Args: + result (str): The message ID. + """ + # Sanity check: A future can only complete once. + if self._result is not None or self._exception is not None: + raise RuntimeError('set_result can only be called once.') + + # Set the result and trigger the future. + self._result = result + self._trigger() + + def set_exception(self, exception): + """Set the result of the future to the given exception. + + Args: + exception (:exc:`Exception`): The exception raised. + """ + # Sanity check: A future can only complete once. + if self._result is not None or self._exception is not None: + raise RuntimeError('set_exception can only be called once.') + + # Set the exception and trigger the future. + self._exception = exception + self._trigger() + + def _trigger(self): + """Trigger all callbacks registered to this Future. + + This method is called internally by the batch once the batch + completes. + + Args: + message_id (str): The message ID, as a string. + """ + for callback in self._callbacks: + callback(self) From 9fd490c819f1060d9814d4610b67901087b3d9d6 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 13:50:40 -0700 Subject: [PATCH 83/86] Add is not None. 
--- pubsub/google/cloud/pubsub_v1/publisher/client.py | 2 +- pubsub/google/cloud/pubsub_v1/publisher/exceptions.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index f3e71062caa7..1a9903a31748 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -87,7 +87,7 @@ def batch(self, topic, message, create=True, autocommit=True): # and place it on the batches dictionary. with self._batch_lock: batch = self._batches.get(topic, None) - if not batch or not batch.will_accept(message): + if batch is None or not batch.will_accept(message): if not create: return None batch = self._batch_class( diff --git a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index a78a692f748c..9ea094b082bf 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import absolute_import + from google.api.core.exceptions import GoogleAPICallError from google.api.core.exceptions import GoogleAPIError From ee144aa796a2cba3bd14be8ba75ed9bd96bf08a3 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 13:51:35 -0700 Subject: [PATCH 84/86] Move the future tests to match the code. 
--- .../publisher/{batch/test_thread_future.py => test_futures.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename pubsub/tests/unit/pubsub_v1/publisher/{batch/test_thread_future.py => test_futures.py} (98%) diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py b/pubsub/tests/unit/pubsub_v1/publisher/test_futures.py similarity index 98% rename from pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py rename to pubsub/tests/unit/pubsub_v1/publisher/test_futures.py index 870c254c68c8..bbb2dcbd5cfa 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread_future.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_futures.py @@ -19,7 +19,7 @@ import pytest from google.cloud.pubsub_v1.publisher import exceptions -from google.cloud.pubsub_v1.publisher.batch.thread import Future +from google.cloud.pubsub_v1.publisher.futures import Future def test_cancel(): From 8cb8f98d10c309634b042354e412bd88e51444de Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 13:56:10 -0700 Subject: [PATCH 85/86] Fix a publish failure test. --- .../unit/pubsub_v1/publisher/batch/test_thread.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index bbbc9890a8b1..3daad6eec96b 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -121,14 +121,18 @@ def test_blocking_commit_no_messages(): def test_blocking_commit_wrong_messageid_length(): batch = create_batch() - batch.publish({'data': b'blah blah blah'}) - batch.publish({'data': b'blah blah blah blah'}) + futures = ( + batch.publish({'data': b'blah blah blah'}), + batch.publish({'data': b'blah blah blah blah'}), + ) # Set up a PublishResponse that only returns one message ID. 
with mock.patch.object(type(batch.client.api), 'publish') as publish: publish.return_value = types.PublishResponse(message_ids=['a']) - with pytest.raises(exceptions.PublishError): - batch._commit() + batch._commit() + for future in futures: + assert future.done() + assert isinstance(future.exception(), exceptions.PublishError) def test_monitor(): From 47678c3bc0aac674ddbd2d64d09bb12ae3429120 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 21 Aug 2017 13:58:01 -0700 Subject: [PATCH 86/86] Fix final test. --- pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py | 7 +------ pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py | 3 ++- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index bf52f4cc604f..0a6716af07ae 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -176,12 +176,7 @@ def call_rpc(self, request_generator): and blocks if there are no outstanding requests (until such time as there are). """ - return self._client.api.streaming_pull( - request_generator, - options=gax.CallOptions(**{ - 'grpc.max_receive_message_length': 20 * (1024 ** 2) + 1, - }), - ) + return self._client.api.streaming_pull(request_generator) def drop(self, ack_id, byte_size): """Remove the given ack ID from lease management. diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index c03da7f81ddc..a4819b900639 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -88,7 +88,8 @@ def open(self, callback): # Start the thread to pass the requests. 
logger.debug('Starting callback requests worker.') self._callback = callback - self._consumer.helper_threads.start('callback requests worker', + self._consumer.helper_threads.start( + 'callback requests worker', self._request_queue, self._callback_requests, )