diff --git a/docs/index.rst b/docs/index.rst
index ee47a2ac378f..d9afe5f31af4 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -8,7 +8,7 @@
   datastore/usage
   dns/usage
   language/usage
-  pubsub/usage
+  pubsub/index
   resource-manager/api
   runtimeconfig/usage
   spanner/usage
diff --git a/docs/pubsub/client.rst b/docs/pubsub/client.rst
deleted file mode 100644
index 2745c1d808ee..000000000000
--- a/docs/pubsub/client.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Pub/Sub Client
-==============
-
-.. automodule:: google.cloud.pubsub.client
-   :members:
-   :show-inheritance:
diff --git a/docs/pubsub/iam.rst b/docs/pubsub/iam.rst
deleted file mode 100644
index 26943762605b..000000000000
--- a/docs/pubsub/iam.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-IAM Policy
-~~~~~~~~~~
-
-.. automodule:: google.cloud.pubsub.iam
-   :members:
-   :member-order: bysource
-   :show-inheritance:
diff --git a/docs/pubsub/index.rst b/docs/pubsub/index.rst
new file mode 100644
index 000000000000..7b7438b29f9c
--- /dev/null
+++ b/docs/pubsub/index.rst
@@ -0,0 +1,117 @@
+#######
+Pub/Sub
+#######
+
+`Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that
+allows you to send and receive messages between independent applications. You
+can leverage Cloud Pub/Sub’s flexibility to decouple systems and components
+hosted on Google Cloud Platform or elsewhere on the Internet. By building on
+the same technology Google uses, Cloud Pub/Sub is designed to provide “at
+least once” delivery at low latency with on-demand scalability to 1 million
+messages per second (and beyond).
+
+.. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/
+
+********************************
+Authentication and Configuration
+********************************
+
+- For an overview of authentication in ``google-cloud-python``,
+  see :doc:`/core/auth`.
+
+- In addition to any authentication configuration, you should also set the
+  :envvar:`GOOGLE_CLOUD_PROJECT` environment variable for the project you'd
+  like to interact with. If the :envvar:`GOOGLE_CLOUD_PROJECT` environment
+  variable is not present, the project ID from JSON file credentials is used.
+
+  If you are using Google App Engine or Google Compute Engine,
+  this will be detected automatically.
+
+- After configuring your environment, create a
+  :class:`~google.cloud.pubsub_v1.PublisherClient` or
+  :class:`~google.cloud.pubsub_v1.SubscriberClient`.
+
+.. code-block:: python
+
+    >>> from google.cloud import pubsub
+    >>> publisher = pubsub.PublisherClient()
+    >>> subscriber = pubsub.SubscriberClient()
+
+or pass in ``credentials`` explicitly.
+
+.. code-block:: python
+
+    >>> from google.cloud import pubsub
+    >>> client = pubsub.PublisherClient(
+    ...     credentials=creds,
+    ... )
+
+**********
+Publishing
+**********
+
+To publish data to Cloud Pub/Sub, you must create a topic and then publish
+messages to it:
+
+.. code-block:: python
+
+    >>> import os
+    >>> from google.cloud import pubsub
+    >>>
+    >>> publisher = pubsub.PublisherClient()
+    >>> topic = 'projects/{project_id}/topics/{topic}'.format(
+    ...     project_id=os.getenv('GOOGLE_CLOUD_PROJECT'),
+    ...     topic='MY_TOPIC_NAME',  # Set this to something appropriate.
+    ... )
+    >>> publisher.create_topic(topic)
+    >>> publisher.publish(topic, b'My first message!', spam='eggs')
+
+To learn more, consult the :doc:`publishing documentation <publisher/index>`.
+
+
+***********
+Subscribing
+***********
+
+To subscribe to data in Cloud Pub/Sub, you create a subscription based on
+the topic, and subscribe to that.
+
+.. code-block:: python
+
+    >>> import os
+    >>> from google.cloud import pubsub
+    >>>
+    >>> subscriber = pubsub.SubscriberClient()
+    >>> topic = 'projects/{project_id}/topics/{topic}'.format(
+    ...     project_id=os.getenv('GOOGLE_CLOUD_PROJECT'),
+    ...     topic='MY_TOPIC_NAME',  # Set this to something appropriate.
+    ... )
+    >>> subscription_name = 'projects/{project_id}/subscriptions/{sub}'.format(
+    ...     project_id=os.getenv('GOOGLE_CLOUD_PROJECT'),
+    ...     sub='MY_SUBSCRIPTION_NAME',  # Set this to something appropriate.
+    ... )
+    >>> subscription = subscriber.create_subscription(topic, subscription_name)
+
+The subscription is opened asynchronously, and messages are processed
+through a callback.
+
+.. code-block:: python
+
+    >>> def callback(message):
+    ...     print(message.data)
+    ...     message.ack()
+    >>> subscription.open(callback)
+
+To learn more, consult the :doc:`subscriber documentation <subscriber/index>`.
+
+
+**********
+Learn More
+**********
+
+.. toctree::
+    :maxdepth: 3
+
+    publisher/index
+    subscriber/index
+    types
diff --git a/docs/pubsub/message.rst b/docs/pubsub/message.rst
deleted file mode 100644
index 654c607d46b3..000000000000
--- a/docs/pubsub/message.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Message
-~~~~~~~
-
-.. automodule:: google.cloud.pubsub.message
-   :members:
-   :show-inheritance:
diff --git a/docs/pubsub/publisher/api/batch.rst b/docs/pubsub/publisher/api/batch.rst
new file mode 100644
index 000000000000..5846d3ff9416
--- /dev/null
+++ b/docs/pubsub/publisher/api/batch.rst
@@ -0,0 +1,8 @@
+:orphan:
+
+Batch API
+=========
+
+.. automodule:: google.cloud.pubsub_v1.publisher.batch.thread
+    :members:
+    :inherited-members:
diff --git a/docs/pubsub/publisher/api/client.rst b/docs/pubsub/publisher/api/client.rst
new file mode 100644
index 000000000000..47a3aa3d5d7a
--- /dev/null
+++ b/docs/pubsub/publisher/api/client.rst
@@ -0,0 +1,6 @@
+Publisher Client API
+====================
+
+.. automodule:: google.cloud.pubsub_v1.publisher.client
+    :members:
+    :inherited-members:
diff --git a/docs/pubsub/publisher/index.rst b/docs/pubsub/publisher/index.rst
new file mode 100644
index 000000000000..72b374b588a3
--- /dev/null
+++ b/docs/pubsub/publisher/index.rst
@@ -0,0 +1,126 @@
+Publishing Messages
+===================
+
+Publishing messages is handled through the
+:class:`~.pubsub_v1.publisher.client.Client` class (aliased as
+``google.cloud.pubsub.PublisherClient``). This class provides methods to
+create topics, and (most importantly) a
+:meth:`~.pubsub_v1.publisher.client.Client.publish` method that publishes
+messages to Pub/Sub.
+
+Instantiating a publishing client is straightforward:
+
+.. code-block:: python
+
+    from google.cloud import pubsub
+    publish_client = pubsub.PublisherClient()
+
+
+Publish a Message
+-----------------
+
+To publish a message, use the
+:meth:`~.pubsub_v1.publisher.client.Client.publish` method. This method accepts
+two positional arguments: the topic to publish to, and the body of the message.
+It also accepts arbitrary keyword arguments, which are passed along as
+attributes of the message.
+
+The topic is passed along as a string; all topics have the canonical form of
+``projects/{project_name}/topics/{topic_name}``.
+
+Therefore, a very basic publishing call looks like:
+
+.. code-block:: python
+
+    topic = 'projects/{project}/topics/{topic}'
+    publish_client.publish(topic, b'This is my message.')
+
+.. note::
+
+    The message data in Pub/Sub is an opaque blob of bytes, and as such, you
+    *must* send a ``bytes`` object in Python 3 (``str`` object in Python 2).
+    If you send a text string (``str`` in Python 3, ``unicode`` in Python 2),
+    the method will raise :exc:`TypeError`.
+
+    The reason it works this way is that there is no reasonable guarantee
+    that the same language or environment is being used by the subscriber,
+    and so it is the responsibility of the publisher to properly encode
+    the payload.
+
+If you want to include attributes, simply add keyword arguments:
+
+.. code-block:: python
+
+    topic = 'projects/{project}/topics/{topic}'
+    publish_client.publish(topic, b'This is my message.', foo='bar')
+
+
+Batching
+--------
+
+Whenever you publish a message, a
+:class:`~.pubsub_v1.publisher.batch.thread.Batch` is automatically created.
+This way, if you publish a large volume of messages, it reduces the number of
+requests made to the server.
+
+On the first message that you send, a new
+:class:`~.pubsub_v1.publisher.batch.thread.Batch` is created automatically.
+For every subsequent message, if there is already a valid batch that is still
+accepting messages, then that batch is used. When the batch is created, it
+begins a countdown that publishes the batch once sufficient time has
+elapsed (by default, this is 0.05 seconds).
+
+If you need different batching settings, simply provide a
+:class:`~.pubsub_v1.types.BatchSettings` object when you instantiate the
+:class:`~.pubsub_v1.publisher.client.Client`:
+
+.. code-block:: python
+
+    from google.cloud import pubsub
+    from google.cloud.pubsub import types
+
+    client = pubsub.PublisherClient(
+        batch_settings=types.BatchSettings(max_messages=500),
+    )
+
+Pub/Sub accepts a maximum of 1,000 messages in a batch, and the size of a
+batch cannot exceed 10 megabytes.
+
+
+Futures
+-------
+
+Every call to :meth:`~.pubsub_v1.publisher.client.Client.publish` will return
+an object that conforms to the :class:`~concurrent.futures.Future` interface.
+You can use this to ensure that the publish succeeded:
+
+.. code-block:: python
+
+    # The .result() method will block until the future is complete.
+    # If there is an error, it will raise an exception.
+    future = client.publish(topic, b'My awesome message.')
+    message_id = future.result()
+
+You can also attach a callback to the future:
+
+.. code-block:: python
+
+    # Callbacks receive the future as their only argument, as defined in
+    # the Future interface.
+    def callback(future):
+        message_id = future.result()
+        do_something_with(message_id)
+
+    # The callback is added once you get the future. If you add a callback
+    # and the future is already done, it will simply be executed immediately.
+    future = client.publish(topic, b'My awesome message.')
+    future.add_done_callback(callback)
+
+
+API Reference
+-------------
+
+.. toctree::
+    :maxdepth: 2
+
+    api/client
diff --git a/docs/pubsub/snippets.py b/docs/pubsub/snippets.py
deleted file mode 100644
index 96eea175c0cd..000000000000
--- a/docs/pubsub/snippets.py
+++ /dev/null
@@ -1,483 +0,0 @@
-# Copyright 2016 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -"""Testable usage examples for Google Cloud Pubsub API wrapper - -Each example function takes a ``client`` argument (which must be an instance -of :class:`google.cloud.pubsub.client.Client`) and uses it to perform a task -with the API. - -To facilitate running the examples as system tests, each example is also passed -a ``to_delete`` list; the function adds to the list any objects created which -need to be deleted during teardown. -""" - -import time - -from google.cloud.pubsub.client import Client - - -def snippet(func): - """Mark ``func`` as a snippet example function.""" - func._snippet = True - return func - - -def _millis(): - return time.time() * 1000 - - -@snippet -def client_list_topics(client, to_delete): # pylint: disable=unused-argument - """List topics for a project.""" - - def do_something_with(sub): # pylint: disable=unused-argument - pass - - # [START client_list_topics] - for topic in client.list_topics(): # API request(s) - do_something_with(topic) - # [END client_list_topics] - - -@snippet -def client_list_subscriptions(client, - to_delete): # pylint: disable=unused-argument - """List all subscriptions for a project.""" - - def do_something_with(sub): # pylint: disable=unused-argument - pass - - # [START client_list_subscriptions] - for subscription in client.list_subscriptions(): # API request(s) - do_something_with(subscription) - # [END client_list_subscriptions] - - -@snippet -def client_topic(client, to_delete): # pylint: disable=unused-argument - """Topic factory.""" - TOPIC_NAME = 'topic_factory-%d' % (_millis(),) - - # [START client_topic] - topic = client.topic(TOPIC_NAME) - # [END client_topic] - - -@snippet -def client_subscription(client, to_delete): # pylint: disable=unused-argument - """Subscription factory.""" - SUBSCRIPTION_NAME = 'subscription_factory-%d' % (_millis(),) - - # [START client_subscription] - subscription = client.subscription( - SUBSCRIPTION_NAME, ack_deadline=60, - retain_acked_messages=True) - # [END client_subscription] - - -@snippet -def topic_create(client, to_delete): - """Create a topic.""" - TOPIC_NAME = 'topic_create-%d' % (_millis(),) - - # [START topic_create] - topic = client.topic(TOPIC_NAME) - topic.create() # API request - # [END topic_create] - - to_delete.append(topic) - - -@snippet -def topic_exists(client, to_delete): - """Test existence of a topic.""" - TOPIC_NAME = 'topic_exists-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - to_delete.append(topic) - - # [START topic_exists] - assert not topic.exists() # API request - topic.create() # API request - assert topic.exists() # API request - # [END topic_exists] - - -@snippet -def topic_delete(client, to_delete): # pylint: disable=unused-argument - """Delete a topic.""" - TOPIC_NAME = 'topic_delete-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() # API request - - # [START topic_delete] - assert topic.exists() # API request - topic.delete() - assert not topic.exists() # API request - # [END topic_delete] - - -@snippet -def topic_iam_policy(client, to_delete): - """Fetch / set a topic's IAM policy.""" - TOPIC_NAME = 'topic_iam_policy-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_get_iam_policy] - policy = topic.get_iam_policy() # API request - # [END topic_get_iam_policy] - - assert len(policy.viewers) == 0 - assert len(policy.editors) == 0 - assert len(policy.owners) == 0 - - # [START topic_set_iam_policy] - ALL_USERS = policy.all_users() - policy.viewers = [ALL_USERS] - 
LOGS_GROUP = policy.group('cloud-logs@google.com') - policy.editors = [LOGS_GROUP] - new_policy = topic.set_iam_policy(policy) # API request - # [END topic_set_iam_policy] - - assert ALL_USERS in new_policy.viewers - assert LOGS_GROUP in new_policy.editors - - -# @snippet # Disabled due to #1687 -def topic_check_iam_permissions(client, to_delete): - """Check topic IAM permissions.""" - TOPIC_NAME = 'topic_check_iam_permissions-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_check_iam_permissions] - from google.cloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE - TO_CHECK = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = topic.check_iam_permissions(TO_CHECK) - assert set(ALLOWED) == set(TO_CHECK) - # [END topic_check_iam_permissions] - - -@snippet -def topic_publish_messages(client, to_delete): - """Publish messages to a topic.""" - TOPIC_NAME = 'topic_publish_messages-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_publish_simple_message] - topic.publish(b'This is the message payload') # API request - # [END topic_publish_simple_message] - - # [START topic_publish_message_with_attrs] - topic.publish(b'Another message payload', extra='EXTRA') # API request - # [END topic_publish_message_with_attrs] - - -@snippet -def topic_subscription(client, to_delete): - """Create subscriptions to a topic.""" - TOPIC_NAME = 'topic_subscription-%d' % (_millis(),) - SUB_DEFAULTS = 'topic_subscription-defaults-%d' % (_millis(),) - SUB_ACK90 = 'topic_subscription-ack90-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_subscription_defaults] - sub_defaults = topic.subscription(SUB_DEFAULTS) - # [END topic_subscription_defaults] - - sub_defaults.create() # API request - to_delete.append(sub_defaults) - expected_names = set() - expected_names.add(sub_defaults.full_name) - - # [START topic_subscription_ack90] - sub_ack90 = topic.subscription(SUB_ACK90, ack_deadline=90) - # [END topic_subscription_ack90] - - sub_ack90.create() # API request - to_delete.append(sub_ack90) - expected_names.add(sub_ack90.full_name) - - sub_names = set() - - def do_something_with(sub): - sub_names.add(sub.full_name) - - # [START topic_list_subscriptions] - for subscription in topic.list_subscriptions(): # API request(s) - do_something_with(subscription) - # [END topic_list_subscriptions] - - assert sub_names.issuperset(expected_names) - - -# @snippet: disabled, because push-mode requires a validated endpoint URL -def topic_subscription_push(client, to_delete): - """Create subscriptions to a topic.""" - TOPIC_NAME = 'topic_subscription_push-%d' % (_millis(),) - SUB_PUSH = 'topic_subscription_push-sub-%d' % (_millis(),) - PUSH_URL = 'https://api.example.com/push-endpoint' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_subscription_push] - subscription = topic.subscription(SUB_PUSH, push_endpoint=PUSH_URL) - subscription.create() # API request - # [END topic_subscription_push] - - # [START subscription_push_pull] - subscription.modify_push_configuration(push_endpoint=None) # API request - # [END subscription_push_pull] - - # [START subscription_pull_push] - subscription.modify_push_configuration( - push_endpoint=PUSH_URL) # API request - # [END subscription_pull_push] - - -@snippet -def subscription_lifecycle(client, to_delete): - """Test lifecycle of a subscription.""" - 
TOPIC_NAME = 'subscription_lifecycle-%d' % (_millis(),) - SUB_NAME = 'subscription_lifecycle-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START subscription_create] - subscription = topic.subscription(SUB_NAME) - subscription.create() # API request - # [END subscription_create] - - # [START subscription_exists] - assert subscription.exists() # API request - # [END subscription_exists] - - # [START subscription_reload] - subscription.reload() # API request - # [END subscription_reload] - - # [START subscription_delete] - subscription.delete() # API request - # [END subscription_delete] - - -@snippet -def subscription_pull(client, to_delete): - """Pull messges from a subscribed topic.""" - TOPIC_NAME = 'subscription_pull-%d' % (_millis(),) - SUB_NAME = 'subscription_pull-defaults-%d' % (_millis(),) - PAYLOAD1 = b'PAYLOAD1' - PAYLOAD2 = b'PAYLOAD2' - EXTRA = 'EXTRA' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START subscription_pull_return_immediately] - pulled = subscription.pull(return_immediately=True) - # [END subscription_pull_return_immediately] - assert len(pulled) == 0, "unexpected message" - - topic.publish(PAYLOAD1) - topic.publish(PAYLOAD2, extra=EXTRA) - - time.sleep(1) # eventually-consistent - - # [START subscription_pull] - pulled = subscription.pull(max_messages=2) - # [END subscription_pull] - - assert len(pulled) == 2, "eventual consistency" - - # [START subscription_modify_ack_deadline] - for ack_id, _ in pulled: - subscription.modify_ack_deadline(ack_id, 90) # API request - # [END subscription_modify_ack_deadline] - - payloads = [] - extras = [] - - def do_something_with(message): # pylint: disable=unused-argument - payloads.append(message.data) - if message.attributes: - extras.append(message.attributes) - - class ApplicationException(Exception): - pass - - def log_exception(_): - pass - - # [START subscription_acknowledge] - for ack_id, message in pulled: - try: - do_something_with(message) - except ApplicationException as e: - log_exception(e) - else: - subscription.acknowledge([ack_id]) - # [END subscription_acknowledge] - - assert set(payloads) == set([PAYLOAD1, PAYLOAD2]), 'payloads: %s' % ( - (payloads,)) - assert extras == [{'extra': EXTRA}], 'extras: %s' % ( - (extras,)) - - -@snippet -def subscription_pull_w_autoack(client, to_delete): - """Pull messges from a topic, auto-acknowldging them""" - TOPIC_NAME = 'subscription_pull_autoack-%d' % (_millis(),) - SUB_NAME = 'subscription_pull_autoack-defaults-%d' % (_millis(),) - PAYLOAD1 = b'PAYLOAD1' - PAYLOAD2 = b'PAYLOAD2' - EXTRA = 'EXTRA' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START topic_batch] - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, extra=EXTRA) - # [END topic_batch] - - time.sleep(1) # eventually-consistent - - payloads = [] - extras = [] - - def do_something_with(message): # pylint: disable=unused-argument - payloads.append(message.data) - if message.attributes: - extras.append(message.attributes) - - # [START subscription_pull_autoack] - from google.cloud.pubsub.subscription import AutoAck - with AutoAck(subscription, max_messages=10) as ack: - for ack_id, message in list(ack.items()): - try: - 
do_something_with(message) - except Exception: # pylint: disable=broad-except - del ack[ack_id] - # [END subscription_pull_autoack] - - assert set(payloads) == set(PAYLOAD1, PAYLOAD1), "eventual consistency" - assert extras == [{'extra': EXTRA}], "eventual consistency" - - -@snippet -def subscription_iam_policy(client, to_delete): - """Fetch / set a subscription's IAM policy.""" - TOPIC_NAME = 'subscription_iam_policy-%d' % (_millis(),) - SUB_NAME = 'subscription_iam_policy-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START subscription_get_iam_policy] - policy = subscription.get_iam_policy() # API request - # [END subscription_get_iam_policy] - - assert len(policy.viewers) == 0 - assert len(policy.editors) == 0 - assert len(policy.owners) == 0 - - # [START subscription_set_iam_policy] - ALL_USERS = policy.all_users() - policy.viewers = [ALL_USERS] - LOGS_GROUP = policy.group('cloud-logs@google.com') - policy.editors = [LOGS_GROUP] - new_policy = subscription.set_iam_policy(policy) # API request - # [END subscription_set_iam_policy] - - assert ALL_USERS in new_policy.viewers - assert LOGS_GROUP in new_policy.editors - - -# @snippet # Disabled due to #1687 -def subscription_check_iam_permissions(client, to_delete): - """Check subscription IAM permissions.""" - TOPIC_NAME = 'subscription_check_iam_permissions-%d' % (_millis(),) - SUB_NAME = 'subscription_check_iam_permissions-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START subscription_check_iam_permissions] - from google.cloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE - TO_CHECK = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = subscription.check_iam_permissions(TO_CHECK) - assert set(ALLOWED) == set(TO_CHECK) - # [END subscription_check_iam_permissions] - - -def _line_no(func): - code = getattr(func, '__code__', None) or getattr(func, 'func_code') - return code.co_firstlineno - - -def _find_examples(): - funcs = [obj for obj in globals().values() - if getattr(obj, '_snippet', False)] - for func in sorted(funcs, key=_line_no): - yield func - - -def _name_and_doc(func): - return func.__name__, func.__doc__ - - -def main(): - client = Client() - for example in _find_examples(): - to_delete = [] - print('%-25s: %s' % _name_and_doc(example)) - try: - example(client, to_delete) - except AssertionError as e: - print(' FAIL: %s' % (e,)) - except Exception as e: # pylint: disable=broad-except - print(' ERROR: %r' % (e,)) - for item in to_delete: - item.delete() - - -if __name__ == '__main__': - main() diff --git a/docs/pubsub/subscriber/api/client.rst b/docs/pubsub/subscriber/api/client.rst new file mode 100644 index 000000000000..965880c5a640 --- /dev/null +++ b/docs/pubsub/subscriber/api/client.rst @@ -0,0 +1,6 @@ +Subscriber Client API +===================== + +.. automodule:: google.cloud.pubsub_v1.subscriber.client + :members: + :inherited-members: diff --git a/docs/pubsub/subscriber/api/message.rst b/docs/pubsub/subscriber/api/message.rst new file mode 100644 index 000000000000..d6566f4c363e --- /dev/null +++ b/docs/pubsub/subscriber/api/message.rst @@ -0,0 +1,5 @@ +Messages +======== + +.. 
autoclass:: google.cloud.pubsub_v1.subscriber.message.Message
+    :members: ack, attributes, data, nack, publish_time
diff --git a/docs/pubsub/subscriber/api/policy.rst b/docs/pubsub/subscriber/api/policy.rst
new file mode 100644
index 000000000000..95d288d0b974
--- /dev/null
+++ b/docs/pubsub/subscriber/api/policy.rst
@@ -0,0 +1,5 @@
+Subscriptions
+=============
+
+.. autoclass:: google.cloud.pubsub_v1.subscriber.policy.thread.Policy
+    :members: open, close
diff --git a/docs/pubsub/subscriber/index.rst b/docs/pubsub/subscriber/index.rst
new file mode 100644
index 000000000000..be32a9e9ed97
--- /dev/null
+++ b/docs/pubsub/subscriber/index.rst
@@ -0,0 +1,123 @@
+Subscribing to Messages
+=======================
+
+Subscribing to messages is handled through the
+:class:`~.pubsub_v1.subscriber.client.Client` class (aliased as
+``google.cloud.pubsub.SubscriberClient``). This class provides a
+:meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method to
+attach to subscriptions on existing topics, and (most importantly) a
+:meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open` method that
+consumes messages from Pub/Sub.
+
+Instantiating a subscriber client is straightforward:
+
+.. code-block:: python
+
+    from google.cloud import pubsub
+    subscriber = pubsub.SubscriberClient()
+
+
+Creating a Subscription
+-----------------------
+
+In Pub/Sub, a **subscription** is a discrete pull of messages from a topic.
+If multiple clients pull the same subscription, then messages are split
+between them. If multiple clients each create their own subscription, then
+each client will get every message.
+
+.. note::
+
+    Remember that Pub/Sub operates under the principle of “everything at
+    least once”. Even in the case where multiple clients pull the same
+    subscription, *some* redundancy is likely.
+
+Creating a subscription requires that you already know what topic you want
+to subscribe to, and that the topic already exists. Once you have that, it
+is easy:
+
+.. code-block:: python
+
+    # Substitute {project}, {topic}, and {subscription} with appropriate
+    # values for your application.
+    topic_name = 'projects/{project}/topics/{topic}'
+    sub_name = 'projects/{project}/subscriptions/{subscription}'
+    subscriber.create_subscription(topic_name, sub_name)
+
+
+Pulling a Subscription
+----------------------
+
+Once you have created a subscription (or if you already had one), the next
+step is to pull data from it. This entails two steps: first you must call
+:meth:`~.pubsub_v1.subscriber.client.Client.subscribe`, passing in the
+subscription string.
+
+.. code-block:: python
+
+    # As before, substitute {project} and {subscription} with appropriate
+    # values for your application.
+    subscription = subscriber.subscribe(
+        'projects/{project}/subscriptions/{subscription}',
+    )
+
+This will return an object with an
+:meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open` method; calling
+this method will actually begin consumption of the subscription.
+
+
+Subscription Callbacks
+----------------------
+
+Because subscriptions in this Pub/Sub client are opened asynchronously,
+processing the messages that are yielded by the subscription is handled
+through **callbacks**.
+
+The basic idea: Define a function that takes one argument; this argument
+will be a :class:`~.pubsub_v1.subscriber.message.Message` instance. This
+function should do whatever processing is necessary. At the end, the
+function should :meth:`~.pubsub_v1.subscriber.message.Message.ack` the
+message.
+
+When you call :meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open`, you
+must pass the callback that will be used.
+
+Here is an example:
+
+.. code-block:: python
+
+    # Define the callback.
+    # Note that the callback is defined *before* the subscription is opened.
+    def callback(message):
+        do_something_with(message)  # Replace this with your actual logic.
+        message.ack()
+
+    # Open the subscription, passing the callback.
+    subscription.open(callback)
+
+Explaining Ack
+--------------
+
+In Pub/Sub, the term **ack** stands for "acknowledge". You should ack a
+message when your processing of that message *has completed*. When you ack
+a message, you are telling Pub/Sub that you do not need to see it again.
+
+It might be tempting to ack messages immediately on receipt. While there
+are valid use cases for this, in general it is unwise: if there is some
+error or edge case in your processing logic and processing of the message
+fails, you will have already told Pub/Sub that you successfully processed
+the message. By contrast, if you ack only upon completion, then Pub/Sub
+will eventually re-deliver the unacknowledged message.
+
+It is also possible to **nack** a message, which is the opposite. When you
+nack, it tells Pub/Sub that you are unable or unwilling to deal with the
+message, and that the service should redeliver it.
+
+
+API Reference
+-------------
+
+.. toctree::
+    :maxdepth: 2
+
+    api/client
+    api/policy
+    api/message
diff --git a/docs/pubsub/subscription.rst b/docs/pubsub/subscription.rst
deleted file mode 100644
index f242cb644e83..000000000000
--- a/docs/pubsub/subscription.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Subscriptions
-~~~~~~~~~~~~~
-
-.. automodule:: google.cloud.pubsub.subscription
-   :members:
-   :member-order: bysource
-   :show-inheritance:
diff --git a/docs/pubsub/topic.rst b/docs/pubsub/topic.rst
deleted file mode 100644
index 323d467a08ce..000000000000
--- a/docs/pubsub/topic.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-Topics
-~~~~~~
-
-.. automodule:: google.cloud.pubsub.topic
-   :members:
-   :member-order: bysource
-   :show-inheritance:
diff --git a/docs/pubsub/types.rst b/docs/pubsub/types.rst
new file mode 100644
index 000000000000..87c987571766
--- /dev/null
+++ b/docs/pubsub/types.rst
@@ -0,0 +1,5 @@
+Pub/Sub Client Types
+====================
+
+.. automodule:: google.cloud.pubsub_v1.types
+    :members:
diff --git a/docs/pubsub/usage.rst b/docs/pubsub/usage.rst
deleted file mode 100644
index 96727e654835..000000000000
--- a/docs/pubsub/usage.rst
+++ /dev/null
@@ -1,245 +0,0 @@
-Pub / Sub
-=========
-
-
-.. toctree::
-   :maxdepth: 2
-   :hidden:
-
-   client
-   topic
-   subscription
-   message
-   iam
-
-Authentication / Configuration
-------------------------------
-
-- Use :class:`Client <google.cloud.pubsub.client.Client>` objects to configure
-  your applications.
-
-- In addition to any authentication configuration, you should also set the
-  :envvar:`GOOGLE_CLOUD_PROJECT` environment variable for the project you'd like
-  to interact with. If you are Google App Engine or Google Compute Engine
-  this will be detected automatically.
-
-- The library now enables the ``gRPC`` transport for the pubsub API by
-  default, assuming that the required dependencies are installed and
-  importable. To *disable* this transport, set the
-  :envvar:`GOOGLE_CLOUD_DISABLE_GRPC` environment variable to a
-  non-empty string, e.g.: ``$ export GOOGLE_CLOUD_DISABLE_GRPC=true``.
-
-- :class:`Client <google.cloud.pubsub.client.Client>` objects hold both a ``project``
-  and an authenticated connection to the PubSub service.
- -- The authentication credentials can be implicitly determined from the - environment or directly via - :meth:`from_service_account_json ` - and - :meth:`from_service_account_p12 `. - -- After setting ``GOOGLE_APPLICATION_CREDENTIALS`` and ``GOOGLE_CLOUD_PROJECT`` - environment variables, create a :class:`Client ` - - .. code-block:: python - - >>> from google.cloud import pubsub - >>> client = pubsub.Client() - - -Manage topics for a project ---------------------------- - -List topics for the default project: - -.. literalinclude:: snippets.py - :start-after: [START client_list_topics] - :end-before: [END client_list_topics] - -Create a new topic for the default project: - -.. literalinclude:: snippets.py - :start-after: [START topic_create] - :end-before: [END topic_create] - -Check for the existence of a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_exists] - :end-before: [END topic_exists] - -Delete a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_delete] - :end-before: [END topic_delete] - -Fetch the IAM policy for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_get_iam_policy] - :end-before: [END topic_get_iam_policy] - -Update the IAM policy for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_set_iam_policy] - :end-before: [END topic_set_iam_policy] - -Test permissions allowed by the current IAM policy on a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_check_iam_permissions] - :end-before: [END topic_check_iam_permissions] - - -Publish messages to a topic ---------------------------- - -Publish a single message to a topic, without attributes: - -.. literalinclude:: snippets.py - :start-after: [START topic_publish_simple_message] - :end-before: [END topic_publish_simple_message] - -Publish a single message to a topic, with attributes: - -.. literalinclude:: snippets.py - :start-after: [START topic_publish_message_with_attrs] - :end-before: [END topic_publish_message_with_attrs] - -Publish a set of messages to a topic (as a single request): - -.. literalinclude:: snippets.py - :start-after: [START topic_batch] - :end-before: [END topic_batch] - -.. note:: - - The only API request happens during the ``__exit__()`` of the topic - used as a context manager, and only if the block exits without raising - an exception. - - -Manage subscriptions to topics ------------------------------- - -List all subscriptions for the default project: - -.. literalinclude:: snippets.py - :start-after: [START client_list_subscriptions] - :end-before: [END client_list_subscriptions] - -List subscriptions for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_list_subscriptions] - :end-before: [END topic_list_subscriptions] - -Create a new pull subscription for a topic, with defaults: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_defaults] - :end-before: [END topic_subscription_defaults] - -Create a new pull subscription for a topic with a non-default ACK deadline: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_ack90] - :end-before: [END topic_subscription_ack90] - -Create a new push subscription for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_push] - :end-before: [END topic_subscription_push] - -Check for the existence of a subscription: - -.. 
literalinclude:: snippets.py - :start-after: [START subscription_exists] - :end-before: [END subscription_exists] - -Convert a pull subscription to push: - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull_push] - :end-before: [END subscription_pull_push] - -Convert a push subscription to pull: - -.. literalinclude:: snippets.py - :start-after: [START subscription_push_pull] - :end-before: [END subscription_push_pull] - -Re-synchronize a subscription with the back-end: - -.. literalinclude:: snippets.py - :start-after: [START subscription_reload] - :end-before: [END subscription_reload] - -Fetch the IAM policy for a subscription - -.. literalinclude:: snippets.py - :start-after: [START subscription_get_iam_policy] - :end-before: [END subscription_get_iam_policy] - -Update the IAM policy for a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_set_iam_policy] - :end-before: [END subscription_set_iam_policy] - -Test permissions allowed by the current IAM policy on a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_check_iam_permissions] - :end-before: [END subscription_check_iam_permissions] - -Delete a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_delete] - :end-before: [END subscription_delete] - - -Pull messages from a subscription ---------------------------------- - -Fetch pending messages for a pull subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull] - :end-before: [END subscription_pull] - -Note that received messages must be acknowledged, or else the back-end -will re-send them later: - -.. literalinclude:: snippets.py - :start-after: [START subscription_acknowledge] - :end-before: [END subscription_acknowledge] - -Fetch messages for a pull subscription without blocking (none pending): - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull_return_immediately] - :end-before: [END subscription_pull_return_immediately] - -Update the acknowlegement deadline for pulled messages: - -.. literalinclude:: snippets.py - :start-after: [START subscription_modify_ack_deadline] - :end-before: [END subscription_modify_ack_deadline] - -Fetch pending messages, acknowledging those whose processing doesn't raise an -error: - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull_autoack] - :end-before: [END subscription_pull_autoack] - -.. 
note:: - - The ``pull`` API request occurs at entry to the ``with`` block, and the - ``acknowlege`` API request occurs at the end, passing only the ``ack_ids`` - which haven't been deleted from ``ack`` diff --git a/pubsub/.coveragerc b/pubsub/.coveragerc index a54b99aa14b7..41ca7428e2ee 100644 --- a/pubsub/.coveragerc +++ b/pubsub/.coveragerc @@ -1,11 +1,17 @@ [run] branch = True +source = + google.cloud.pubsub + google.cloud.pubsub_v1 + tests.unit [report] -fail_under = 100 show_missing = True + exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError diff --git a/pubsub/google/cloud/gapic/__init__.py b/pubsub/google/cloud/gapic/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/gapic/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/gapic/pubsub/__init__.py b/pubsub/google/cloud/gapic/pubsub/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/tests/__init__.py b/pubsub/google/cloud/gapic/pubsub/v1/__init__.py similarity index 100% rename from pubsub/tests/__init__.py rename to pubsub/google/cloud/gapic/pubsub/v1/__init__.py diff --git a/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py new file mode 100644 index 000000000000..c0466e6d444b --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py @@ -0,0 +1,565 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Publisher API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 + +_PageDesc = google.gax.PageDescriptor + + +class PublisherClient(object): + """ + The service that an application uses to manipulate topics, and to send + messages to a topic. 
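+
+    A minimal usage sketch (illustrative only; it assumes application
+    default credentials are available in the environment, and simply
+    mirrors the per-method examples below):
+
+        >>> from google.cloud.gapic.pubsub.v1 import publisher_client
+        >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2
+        >>> client = publisher_client.PublisherClient()
+        >>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
+        >>> response = client.create_topic(topic)  # Assumes the topic does not exist yet.
+        >>> message = pubsub_pb2.PubsubMessage(data=b'Hello, world!')
+        >>> response = client.publish(topic, [message])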
+ """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + _PAGE_DESCRIPTORS = { + 'list_topics': + _PageDesc('page_token', 'next_page_token', 'topics'), + 'list_topic_subscriptions': + _PageDesc('page_token', 'next_page_token', 'subscriptions') + } + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') + _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/topics/{topic}') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return cls._PROJECT_PATH_TEMPLATE.render({ + 'project': project, + }) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return cls._TOPIC_PATH_TEMPLATE.render({ + 'project': project, + 'topic': topic, + }) + + @classmethod + def match_project_from_project_name(cls, project_name): + """Parses the project from a project resource. + + Args: + project_name (string): A fully-qualified path representing a project + resource. + + Returns: + A string representing the project. + """ + return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') + + @classmethod + def match_project_from_topic_name(cls, topic_name): + """Parses the project from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the project. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') + + @classmethod + def match_topic_from_topic_name(cls, topic_name): + """Parses the topic from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the topic. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. 
(Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A PublisherClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + # Load the configuration defaults. + default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'publisher_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.pubsub.v1.Publisher', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.iam_policy_stub = config.create_stub( + iam_policy_pb2.IAMPolicyStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + self.publisher_stub = config.create_stub( + pubsub_pb2.PublisherStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._create_topic = api_callable.create_api_call( + self.publisher_stub.CreateTopic, settings=defaults['create_topic']) + self._publish = api_callable.create_api_call( + self.publisher_stub.Publish, settings=defaults['publish']) + self._get_topic = api_callable.create_api_call( + self.publisher_stub.GetTopic, settings=defaults['get_topic']) + self._list_topics = api_callable.create_api_call( + self.publisher_stub.ListTopics, settings=defaults['list_topics']) + self._list_topic_subscriptions = api_callable.create_api_call( + self.publisher_stub.ListTopicSubscriptions, + settings=defaults['list_topic_subscriptions']) + self._delete_topic = api_callable.create_api_call( + self.publisher_stub.DeleteTopic, settings=defaults['delete_topic']) + self._set_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.SetIamPolicy, + settings=defaults['set_iam_policy']) + self._get_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.GetIamPolicy, + settings=defaults['get_iam_policy']) + self._test_iam_permissions = api_callable.create_api_call( + self.iam_policy_stub.TestIamPermissions, + settings=defaults['test_iam_permissions']) + + # Service calls + def create_topic(self, name, options=None): + """ + Creates the given topic with the given name. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> name = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.create_topic(name) + + Args: + name (string): The name of the topic. It must have the format + ``\"projects/{project}/topics/{topic}\"``. ``{topic}`` must start with a letter, + and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent + signs (``%``). It must be between 3 and 255 characters in length, and it + must not start with ``\"goog\"``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.Topic(name=name) + return self._create_topic(request, options) + + def publish(self, topic, messages, options=None): + """ + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> data = b'' + >>> messages_element = pubsub_pb2.PubsubMessage(data=data) + >>> messages = [messages_element] + >>> response = client.publish(topic, messages) + + Args: + topic (string): The messages in the request will be published on this topic. + Format is ``projects/{project}/topics/{topic}``. + messages (list[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PubsubMessage`]): The messages to publish. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PublishResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) + return self._publish(request, options) + + def get_topic(self, topic, options=None): + """ + Gets the configuration of a topic. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.get_topic(topic) + + Args: + topic (string): The name of the topic to get. + Format is ``projects/{project}/topics/{topic}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.GetTopicRequest(topic=topic) + return self._get_topic(request, options) + + def list_topics(self, project, page_size=None, options=None): + """ + Lists matching topics. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = publisher_client.PublisherClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topics(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that topics belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListTopicsRequest( + project=project, page_size=page_size) + return self._list_topics(request, options) + + def list_topic_subscriptions(self, topic, page_size=None, options=None): + """ + Lists the name of the subscriptions for this topic. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topic_subscriptions(topic): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + topic (string): The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of string instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+ request = pubsub_pb2.ListTopicSubscriptionsRequest( + topic=topic, page_size=page_size) + return self._list_topic_subscriptions(request, options) + + def delete_topic(self, topic, options=None): + """ + Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their ``topic`` field is set to ``_deleted-topic_``. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> client.delete_topic(topic) + + Args: + topic (string): Name of the topic to delete. + Format is ``projects/{project}/topics/{topic}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteTopicRequest(topic=topic) + self._delete_topic(request, options) + + def set_iam_policy(self, resource, policy, options=None): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.iam.v1 import policy_pb2 + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> policy = policy_pb2.Policy() + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (string): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, options) + + def get_iam_policy(self, resource, options=None): + """ + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.get_iam_policy(resource) + + Args: + resource (string): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. 
+ + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, options) + + def test_iam_permissions(self, resource, permissions, options=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> permissions = [] + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (string): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions(request, options) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json new file mode 100644 index 000000000000..7e8a723499e6 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json @@ -0,0 +1,98 @@ +{ + "interfaces": { + "google.pubsub.v1.Publisher": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "one_plus_delivery": [ + "CANCELLED", + "UNKNOWN", + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "ABORTED", + "INTERNAL", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Publish": { + "timeout_millis": 60000, + "retry_codes_name": "one_plus_delivery", + "retry_params_name": "messaging", + "bundling": { + "element_count_threshold": 10, + "element_count_limit": 1000, + "request_byte_threshold": 1024, + "request_byte_limit": 10485760, + "delay_threshold_millis": 10 + } + }, + "GetTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + 
"retry_params_name": "default" + }, + "ListTopics": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListTopicSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py new file mode 100644 index 000000000000..5313e0d941a1 --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py @@ -0,0 +1,1063 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Subscriber API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +from google.gax.utils import oneof +import google.gax + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 + +_PageDesc = google.gax.PageDescriptor + + +class SubscriberClient(object): + """ + The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the ``Pull`` method. 
+ """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + _PAGE_DESCRIPTORS = { + 'list_subscriptions': + _PageDesc('page_token', 'next_page_token', 'subscriptions'), + 'list_snapshots': + _PageDesc('page_token', 'next_page_token', 'snapshots') + } + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') + _SNAPSHOT_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/snapshots/{snapshot}') + _SUBSCRIPTION_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/subscriptions/{subscription}') + _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/topics/{topic}') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return cls._PROJECT_PATH_TEMPLATE.render({ + 'project': project, + }) + + @classmethod + def snapshot_path(cls, project, snapshot): + """Returns a fully-qualified snapshot resource name string.""" + return cls._SNAPSHOT_PATH_TEMPLATE.render({ + 'project': project, + 'snapshot': snapshot, + }) + + @classmethod + def subscription_path(cls, project, subscription): + """Returns a fully-qualified subscription resource name string.""" + return cls._SUBSCRIPTION_PATH_TEMPLATE.render({ + 'project': + project, + 'subscription': + subscription, + }) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return cls._TOPIC_PATH_TEMPLATE.render({ + 'project': project, + 'topic': topic, + }) + + @classmethod + def match_project_from_project_name(cls, project_name): + """Parses the project from a project resource. + + Args: + project_name (string): A fully-qualified path representing a project + resource. + + Returns: + A string representing the project. + """ + return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') + + @classmethod + def match_project_from_snapshot_name(cls, snapshot_name): + """Parses the project from a snapshot resource. + + Args: + snapshot_name (string): A fully-qualified path representing a snapshot + resource. + + Returns: + A string representing the project. + """ + return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('project') + + @classmethod + def match_snapshot_from_snapshot_name(cls, snapshot_name): + """Parses the snapshot from a snapshot resource. + + Args: + snapshot_name (string): A fully-qualified path representing a snapshot + resource. + + Returns: + A string representing the snapshot. + """ + return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('snapshot') + + @classmethod + def match_project_from_subscription_name(cls, subscription_name): + """Parses the project from a subscription resource. + + Args: + subscription_name (string): A fully-qualified path representing a subscription + resource. + + Returns: + A string representing the project. + """ + return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( + 'project') + + @classmethod + def match_subscription_from_subscription_name(cls, subscription_name): + """Parses the subscription from a subscription resource. + + Args: + subscription_name (string): A fully-qualified path representing a subscription + resource. 
+ + Returns: + A string representing the subscription. + """ + return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( + 'subscription') + + @classmethod + def match_project_from_topic_name(cls, topic_name): + """Parses the project from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the project. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') + + @classmethod + def match_topic_from_topic_name(cls, topic_name): + """Parses the topic from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the topic. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A SubscriberClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. 
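+ # (The distribution name queried below is this package's name on PyPI.)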
+ metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + # Load the configuration defaults. + default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'subscriber_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.pubsub.v1.Subscriber', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.iam_policy_stub = config.create_stub( + iam_policy_pb2.IAMPolicyStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + self.subscriber_stub = config.create_stub( + pubsub_pb2.SubscriberStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._create_subscription = api_callable.create_api_call( + self.subscriber_stub.CreateSubscription, + settings=defaults['create_subscription']) + self._get_subscription = api_callable.create_api_call( + self.subscriber_stub.GetSubscription, + settings=defaults['get_subscription']) + self._update_subscription = api_callable.create_api_call( + self.subscriber_stub.UpdateSubscription, + settings=defaults['update_subscription']) + self._list_subscriptions = api_callable.create_api_call( + self.subscriber_stub.ListSubscriptions, + settings=defaults['list_subscriptions']) + self._delete_subscription = api_callable.create_api_call( + self.subscriber_stub.DeleteSubscription, + settings=defaults['delete_subscription']) + self._modify_ack_deadline = api_callable.create_api_call( + self.subscriber_stub.ModifyAckDeadline, + settings=defaults['modify_ack_deadline']) + self._acknowledge = api_callable.create_api_call( + self.subscriber_stub.Acknowledge, settings=defaults['acknowledge']) + self._pull = api_callable.create_api_call( + self.subscriber_stub.Pull, settings=defaults['pull']) + self._streaming_pull = api_callable.create_api_call( + self.subscriber_stub.StreamingPull, + settings=defaults['streaming_pull']) + self._modify_push_config = api_callable.create_api_call( + self.subscriber_stub.ModifyPushConfig, + settings=defaults['modify_push_config']) + self._list_snapshots = api_callable.create_api_call( + self.subscriber_stub.ListSnapshots, + settings=defaults['list_snapshots']) + self._create_snapshot = api_callable.create_api_call( + self.subscriber_stub.CreateSnapshot, + settings=defaults['create_snapshot']) + self._delete_snapshot = api_callable.create_api_call( + self.subscriber_stub.DeleteSnapshot, + settings=defaults['delete_snapshot']) + self._seek = api_callable.create_api_call( + self.subscriber_stub.Seek, settings=defaults['seek']) + self._set_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.SetIamPolicy, + settings=defaults['set_iam_policy']) + self._get_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.GetIamPolicy, + settings=defaults['get_iam_policy']) + self._test_iam_permissions = api_callable.create_api_call( + self.iam_policy_stub.TestIamPermissions, + settings=defaults['test_iam_permissions']) + + # Service calls + def create_subscription(self, + name, + topic, + push_config=None, + ack_deadline_seconds=None, + retain_acked_messages=None, + message_retention_duration=None, + options=None): + """ + Creates a subscription to a given topic. + If the subscription already exists, returns ``ALREADY_EXISTS``. 
+ If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + `resource name format `_. + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.create_subscription(name, topic) + + Args: + name (string): The name of the subscription. It must have the format + ``\"projects/{project}/subscriptions/{subscription}\"``. ``{subscription}`` must + start with a letter, and contain only letters (``[A-Za-z]``), numbers + (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters + in length, and it must not start with ``\"goog\"``. + topic (string): The name of the topic from which this subscription is receiving messages. + Format is ``projects/{project}/topics/{topic}``. + The value of this field will be ``_deleted-topic_`` if the topic has been + deleted. + push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): If push delivery is used with this subscription, this field is + used to configure it. An empty ``pushConfig`` signifies that the subscriber + will pull and ack messages using API methods. + ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message + before the subscriber should acknowledge the message. After message + delivery but before the ack deadline expires and before the message is + acknowledged, it is an outstanding message and will not be delivered + again during that time (on a best-effort basis). + + For pull subscriptions, this value is used as the initial value for the ack + deadline. To override this value for a given message, call + ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using + pull. + The minimum custom deadline you can specify is 10 seconds. + The maximum custom deadline you can specify is 600 seconds (10 minutes). + If this parameter is 0, a default value of 10 seconds is used. + + For push delivery, this value is also used to set the request timeout for + the call to the push endpoint. + + If the subscriber never acknowledges the message, the Pub/Sub + system will eventually redeliver the message. + retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then + messages are not expunged from the subscription's backlog, even if they are + acknowledged, until they fall out of the ``message_retention_duration`` + window. + message_retention_duration (:class:`google.protobuf.duration_pb2.Duration`): How long to retain unacknowledged messages in the subscription's backlog, + from the moment a message is published. + If ``retain_acked_messages`` is true, then this also configures the retention + of acknowledged messages, and thus configures how far back in time a ``Seek`` + can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. 
+ + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.Subscription( + name=name, + topic=topic, + push_config=push_config, + ack_deadline_seconds=ack_deadline_seconds, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration) + return self._create_subscription(request, options) + + def get_subscription(self, subscription, options=None): + """ + Gets the configuration details of a subscription. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.get_subscription(subscription) + + Args: + subscription (string): The name of the subscription to get. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) + return self._get_subscription(request, options) + + def update_subscription(self, subscription, update_mask, options=None): + """ + Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> from google.protobuf import field_mask_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = pubsub_pb2.Subscription() + >>> update_mask = field_mask_pb2.FieldMask() + >>> response = client.update_subscription(subscription, update_mask) + + Args: + subscription (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription`): The updated subscription object. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Indicates which fields in the provided subscription to update. + Must be specified and non-empty. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.UpdateSubscriptionRequest( + subscription=subscription, update_mask=update_mask) + return self._update_subscription(request, options) + + def list_subscriptions(self, project, page_size=None, options=None): + """ + Lists matching subscriptions. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = subscriber_client.SubscriberClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_subscriptions(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListSubscriptionsRequest( + project=project, page_size=page_size) + return self._list_subscriptions(request, options) + + def delete_subscription(self, subscription, options=None): + """ + Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to ``Pull`` after deletion will return + ``NOT_FOUND``. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> client.delete_subscription(subscription) + + Args: + subscription (string): The subscription to delete. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteSubscriptionRequest( + subscription=subscription) + self._delete_subscription(request, options) + + def modify_ack_deadline(self, + subscription, + ack_ids, + ack_deadline_seconds, + options=None): + """ + Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level ``ackDeadlineSeconds`` used for subsequent messages. 
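+
+ Setting the deadline to zero is the conventional way to "nack" a message
+ so that it becomes eligible for redelivery right away; for example (a
+ sketch; ``ack_id`` would come from a prior ``Pull`` response):
+
+ >>> client.modify_ack_deadline(subscription, [ack_id], 0)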
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> ack_deadline_seconds = 0 + >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) + + Args: + subscription (string): The name of the subscription. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[string]): List of acknowledgment IDs. + ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to + the Pub/Sub system. For example, if the value is 10, the new + ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero may immediately make the message available for + another pull request. + The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 minutes). + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ModifyAckDeadlineRequest( + subscription=subscription, + ack_ids=ack_ids, + ack_deadline_seconds=ack_deadline_seconds) + self._modify_ack_deadline(request, options) + + def acknowledge(self, subscription, ack_ids, options=None): + """ + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> client.acknowledge(subscription, ack_ids) + + Args: + subscription (string): The subscription whose message is being acknowledged. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[string]): The acknowledgment ID for the messages being acknowledged that was returned + by the Pub/Sub system in the ``Pull`` response. Must not be empty. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.AcknowledgeRequest( + subscription=subscription, ack_ids=ack_ids) + self._acknowledge(request, options) + + def pull(self, + subscription, + max_messages, + return_immediately=None, + options=None): + """ + Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return ``UNAVAILABLE`` if + there are too many concurrent pull requests pending for the given + subscription. 
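+
+ A typical consumer pairs ``pull`` with ``acknowledge`` and acks only after
+ each message has been handled. The loop below is a sketch; ``handle`` is a
+ placeholder for user code:
+
+ >>> while True:
+ ...     response = client.pull(subscription, max_messages=10)
+ ...     for received in response.received_messages:
+ ...         handle(received.message)
+ ...     if response.received_messages:
+ ...         client.acknowledge(
+ ...             subscription,
+ ...             [r.ack_id for r in response.received_messages])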
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> max_messages = 0 + >>> response = client.pull(subscription, max_messages) + + Args: + subscription (string): The subscription from which messages should be pulled. + Format is ``projects/{project}/subscriptions/{sub}``. + max_messages (int): The maximum number of messages returned for this request. The Pub/Sub + system may return fewer than the number specified. + return_immediately (bool): If this field set to true, the system will respond immediately even if + it there are no messages available to return in the ``Pull`` response. + Otherwise, the system may wait (for a bounded amount of time) until at + least one message is available, rather than returning no messages. The + client may cancel the request if it does not wish to wait any longer for + the response. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PullResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.PullRequest( + subscription=subscription, + max_messages=max_messages, + return_immediately=return_immediately) + return self._pull(request, options) + + def streaming_pull(self, requests, options=None): + """ + (EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status ``OK`` to reassign + server-side resources, in which case, the client should re-establish the + stream. ``UNAVAILABLE`` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + + EXPERIMENTAL: This method interface might change in the future. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> stream_ack_deadline_seconds = 0 + >>> request = pubsub_pb2.StreamingPullRequest(subscription=subscription, stream_ack_deadline_seconds=stream_ack_deadline_seconds) + >>> requests = [request] + >>> for element in client.streaming_pull(requests): + >>> # process element + >>> pass + + Args: + requests (iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullRequest`]): The input objects. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullResponse`]. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
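+
+ A common way to drive the stream (a sketch; the ``ack_queue`` object and
+ its blocking ``get()`` are assumptions, not part of the generated surface)
+ is a request generator whose first item names the subscription and whose
+ later items carry only acknowledgements:
+
+ >>> def request_generator(subscription, ack_queue):
+ ...     # The first request must name the subscription and set the
+ ...     # stream-wide ack deadline.
+ ...     yield pubsub_pb2.StreamingPullRequest(
+ ...         subscription=subscription,
+ ...         stream_ack_deadline_seconds=60)
+ ...     # Later requests carry only acks or deadline modifications.
+ ...     while True:
+ ...         yield pubsub_pb2.StreamingPullRequest(ack_ids=ack_queue.get())
+ >>> responses = client.streaming_pull(request_generator(subscription, ack_queue))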
+ """ + return self._streaming_pull(requests, options) + + def modify_push_config(self, subscription, push_config, options=None): + """ + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty ``PushConfig``) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the ``PushConfig``. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> push_config = pubsub_pb2.PushConfig() + >>> client.modify_push_config(subscription, push_config) + + Args: + subscription (string): The name of the subscription. + Format is ``projects/{project}/subscriptions/{sub}``. + push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub system should + stop pushing messages from the given subscription and allow + messages to be pulled and acknowledged - effectively pausing + the subscription if ``Pull`` is not called. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ModifyPushConfigRequest( + subscription=subscription, push_config=push_config) + self._modify_push_config(request, options) + + def list_snapshots(self, project, page_size=None, options=None): + """ + Lists the existing snapshots. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = subscriber_client.SubscriberClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_snapshots(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_snapshots(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that snapshots belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+ request = pubsub_pb2.ListSnapshotsRequest( + project=project, page_size=page_size) + return self._list_snapshots(request, options) + + def create_snapshot(self, name, subscription, options=None): + """ + Creates a snapshot from the requested subscription. + If the snapshot already exists, returns ``ALREADY_EXISTS``. + If the requested subscription doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + `resource name format `_. + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.create_snapshot(name, subscription) + + Args: + name (string): Optional user-provided name for this snapshot. + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription. + Note that for REST API requests, you must specify a name. + Format is ``projects/{project}/snapshots/{snap}``. + subscription (string): The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: + + - The existing backlog on the subscription. More precisely, this is + defined as the messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + `CreateSnapshot` request; as well as: + - Any messages published to the subscription's topic following the + successful completion of the CreateSnapshot request. + + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.CreateSnapshotRequest( + name=name, subscription=subscription) + return self._create_snapshot(request, options) + + def delete_snapshot(self, snapshot, options=None): + """ + Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> client.delete_snapshot(snapshot) + + Args: + snapshot (string): The name of the snapshot to delete. + Format is ``projects/{project}/snapshots/{snap}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
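+ # The RPC maps to google.protobuf.Empty, so nothing is returned to the
+ # caller on success.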
+ request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + self._delete_snapshot(request, options) + + def seek(self, subscription, time=None, snapshot=None, options=None): + """ + Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.seek(subscription) + + Args: + subscription (string): The subscription to affect. + time (:class:`google.protobuf.timestamp_pb2.Timestamp`): The time to seek to. + Messages retained in the subscription that were published before this + time are marked as acknowledged, and messages retained in the + subscription that were published after this time are marked as + unacknowledged. Note that this operation affects only those messages + retained in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). For example, + if ``time`` corresponds to a point before the message retention + window (or to a point before the system's notion of the subscription + creation time), only retained messages will be marked as unacknowledged, + and already-expunged messages will not be restored. + snapshot (string): The snapshot to seek to. The snapshot's topic must be the same as that of + the provided subscription. + Format is ``projects/{project}/snapshots/{snap}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.SeekResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + oneof.check_oneof( + time=time, + snapshot=snapshot, ) + + # Create the request object. + request = pubsub_pb2.SeekRequest( + subscription=subscription, time=time, snapshot=snapshot) + return self._seek(request, options) + + def set_iam_policy(self, resource, policy, options=None): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.iam.v1 import policy_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> policy = policy_pb2.Policy() + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (string): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
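+
+ In practice the policy usually carries one or more bindings; the role and
+ member below are illustrative placeholders:
+
+ >>> policy = policy_pb2.Policy()
+ >>> binding = policy.bindings.add()
+ >>> binding.role = 'roles/pubsub.subscriber'
+ >>> binding.members.append('user:alice@example.com')
+ >>> response = client.set_iam_policy(resource, policy)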
+ """ + # Create the request object. + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, options) + + def get_iam_policy(self, resource, options=None): + """ + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.get_iam_policy(resource) + + Args: + resource (string): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, options) + + def test_iam_permissions(self, resource, permissions, options=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> permissions = [] + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (string): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
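+ # Permissions are plain strings such as 'pubsub.subscriptions.consume';
+ # wildcards are rejected by the service, as noted in the docstring above.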
+ request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions(request, options) diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json new file mode 100644 index 000000000000..6180cc0a941f --- /dev/null +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json @@ -0,0 +1,138 @@ +{ + "interfaces": { + "google.pubsub.v1.Subscriber": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ], + "pull": [ + "CANCELLED", + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "INTERNAL", + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + }, + "streaming": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 900000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 900000, + "total_timeout_millis": 900000 + } + }, + "methods": { + "CreateSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "UpdateSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ModifyAckDeadline": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Acknowledge": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "messaging" + }, + "Pull": { + "timeout_millis": 60000, + "retry_codes_name": "pull", + "retry_params_name": "messaging" + }, + "StreamingPull": { + "timeout_millis": 900000, + "retry_codes_name": "pull", + "retry_params_name": "streaming" + }, + "ModifyPushConfig": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListSnapshots": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Seek": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": 
"default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/pubsub/google/cloud/proto/__init__.py b/pubsub/google/cloud/proto/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/proto/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/proto/pubsub/__init__.py b/pubsub/google/cloud/proto/pubsub/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/pubsub/google/cloud/proto/pubsub/v1/__init__.py b/pubsub/google/cloud/proto/pubsub/v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py new file mode 100644 index 000000000000..aeee99e182d0 --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py @@ -0,0 +1,3594 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/proto/pubsub/v1/pubsub.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/pubsub/v1/pubsub.proto', + package='google.pubsub.v1', + syntax='proto3', + serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"y\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 
\x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xf7\x10\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapsho
t\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9a\x07\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_TOPIC_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Topic.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Topic.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Topic.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + +_TOPIC = _descriptor.Descriptor( + name='Topic', + full_name='google.pubsub.v1.Topic', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Topic.name', index=0, + number=1, type=9, 
cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Topic.labels', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TOPIC_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=221, + serialized_end=342, +) + + +_PUBSUBMESSAGE_ATTRIBUTESENTRY = _descriptor.Descriptor( + name='AttributesEntry', + full_name='google.pubsub.v1.PubsubMessage.AttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=515, + serialized_end=564, +) + +_PUBSUBMESSAGE = _descriptor.Descriptor( + name='PubsubMessage', + full_name='google.pubsub.v1.PubsubMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='google.pubsub.v1.PubsubMessage.data', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='attributes', full_name='google.pubsub.v1.PubsubMessage.attributes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_id', full_name='google.pubsub.v1.PubsubMessage.message_id', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='publish_time', full_name='google.pubsub.v1.PubsubMessage.publish_time', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY, ], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=345, + serialized_end=564, +) + + +_GETTOPICREQUEST = _descriptor.Descriptor( + name='GetTopicRequest', + full_name='google.pubsub.v1.GetTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.GetTopicRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=566, + serialized_end=598, +) + + +_UPDATETOPICREQUEST = _descriptor.Descriptor( + name='UpdateTopicRequest', + full_name='google.pubsub.v1.UpdateTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.UpdateTopicRequest.topic', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateTopicRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=600, + serialized_end=709, +) + + +_PUBLISHREQUEST = _descriptor.Descriptor( + name='PublishRequest', + full_name='google.pubsub.v1.PublishRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.PublishRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='messages', full_name='google.pubsub.v1.PublishRequest.messages', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=711, + serialized_end=793, +) + + +_PUBLISHRESPONSE = _descriptor.Descriptor( + name='PublishResponse', + full_name='google.pubsub.v1.PublishResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message_ids', full_name='google.pubsub.v1.PublishResponse.message_ids', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=795, + serialized_end=833, +) + + +_LISTTOPICSREQUEST = _descriptor.Descriptor( + name='ListTopicsRequest', + full_name='google.pubsub.v1.ListTopicsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListTopicsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListTopicsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=835, + serialized_end=910, +) + + +_LISTTOPICSRESPONSE = _descriptor.Descriptor( + name='ListTopicsResponse', + full_name='google.pubsub.v1.ListTopicsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topics', full_name='google.pubsub.v1.ListTopicsResponse.topics', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=912, + serialized_end=998, +) + + +_LISTTOPICSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( + name='ListTopicSubscriptionsRequest', + full_name='google.pubsub.v1.ListTopicSubscriptionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1000, + serialized_end=1085, +) + + +_LISTTOPICSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( + name='ListTopicSubscriptionsResponse', + full_name='google.pubsub.v1.ListTopicSubscriptionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscriptions', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1087, + serialized_end=1167, +) + + +_DELETETOPICREQUEST = _descriptor.Descriptor( + name='DeleteTopicRequest', + full_name='google.pubsub.v1.DeleteTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.DeleteTopicRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1169, + serialized_end=1204, +) + + +_SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Subscription.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Subscription.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Subscription.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), 
_b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + +_SUBSCRIPTION = _descriptor.Descriptor( + name='Subscription', + full_name='google.pubsub.v1.Subscription', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Subscription.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.Subscription.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='push_config', full_name='google.pubsub.v1.Subscription.push_config', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_deadline_seconds', full_name='google.pubsub.v1.Subscription.ack_deadline_seconds', index=3, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='retain_acked_messages', full_name='google.pubsub.v1.Subscription.retain_acked_messages', index=4, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_retention_duration', full_name='google.pubsub.v1.Subscription.message_retention_duration', index=5, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Subscription.labels', index=6, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_SUBSCRIPTION_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1207, + serialized_end=1532, +) + + +_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( + name='AttributesEntry', + full_name='google.pubsub.v1.PushConfig.AttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.PushConfig.AttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', 
full_name='google.pubsub.v1.PushConfig.AttributesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=515, + serialized_end=564, +) + +_PUSHCONFIG = _descriptor.Descriptor( + name='PushConfig', + full_name='google.pubsub.v1.PushConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='push_endpoint', full_name='google.pubsub.v1.PushConfig.push_endpoint', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='attributes', full_name='google.pubsub.v1.PushConfig.attributes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1535, + serialized_end=1687, +) + + +_RECEIVEDMESSAGE = _descriptor.Descriptor( + name='ReceivedMessage', + full_name='google.pubsub.v1.ReceivedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ack_id', full_name='google.pubsub.v1.ReceivedMessage.ack_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message', full_name='google.pubsub.v1.ReceivedMessage.message', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1689, + serialized_end=1772, +) + + +_GETSUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='GetSubscriptionRequest', + full_name='google.pubsub.v1.GetSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.GetSubscriptionRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1774, + serialized_end=1820, +) + + +_UPDATESUBSCRIPTIONREQUEST = 
_descriptor.Descriptor( + name='UpdateSubscriptionRequest', + full_name='google.pubsub.v1.UpdateSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.UpdateSubscriptionRequest.subscription', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateSubscriptionRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1823, + serialized_end=1953, +) + + +_LISTSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( + name='ListSubscriptionsRequest', + full_name='google.pubsub.v1.ListSubscriptionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListSubscriptionsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1955, + serialized_end=2037, +) + + +_LISTSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( + name='ListSubscriptionsResponse', + full_name='google.pubsub.v1.ListSubscriptionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscriptions', full_name='google.pubsub.v1.ListSubscriptionsResponse.subscriptions', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListSubscriptionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, 
+ is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2039, + serialized_end=2146, +) + + +_DELETESUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='DeleteSubscriptionRequest', + full_name='google.pubsub.v1.DeleteSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.DeleteSubscriptionRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2148, + serialized_end=2197, +) + + +_MODIFYPUSHCONFIGREQUEST = _descriptor.Descriptor( + name='ModifyPushConfigRequest', + full_name='google.pubsub.v1.ModifyPushConfigRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.ModifyPushConfigRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='push_config', full_name='google.pubsub.v1.ModifyPushConfigRequest.push_config', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2199, + serialized_end=2297, +) + + +_PULLREQUEST = _descriptor.Descriptor( + name='PullRequest', + full_name='google.pubsub.v1.PullRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.PullRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_immediately', full_name='google.pubsub.v1.PullRequest.return_immediately', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_messages', full_name='google.pubsub.v1.PullRequest.max_messages', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2299, + serialized_end=2384, +) + + +_PULLRESPONSE = _descriptor.Descriptor( + name='PullResponse', + 
full_name='google.pubsub.v1.PullResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='received_messages', full_name='google.pubsub.v1.PullResponse.received_messages', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2386, + serialized_end=2462, +) + + +_MODIFYACKDEADLINEREQUEST = _descriptor.Descriptor( + name='ModifyAckDeadlineRequest', + full_name='google.pubsub.v1.ModifyAckDeadlineRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids', index=1, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_deadline_seconds', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2464, + serialized_end=2559, +) + + +_ACKNOWLEDGEREQUEST = _descriptor.Descriptor( + name='AcknowledgeRequest', + full_name='google.pubsub.v1.AcknowledgeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.AcknowledgeRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.AcknowledgeRequest.ack_ids', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2561, + serialized_end=2620, +) + + +_STREAMINGPULLREQUEST = _descriptor.Descriptor( + name='StreamingPullRequest', + full_name='google.pubsub.v1.StreamingPullRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.StreamingPullRequest.subscription', index=0, + 
number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.ack_ids', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='modify_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds', index=2, + number=3, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='modify_deadline_ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids', index=3, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream_ack_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2623, + serialized_end=2787, +) + + +_STREAMINGPULLRESPONSE = _descriptor.Descriptor( + name='StreamingPullResponse', + full_name='google.pubsub.v1.StreamingPullResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='received_messages', full_name='google.pubsub.v1.StreamingPullResponse.received_messages', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2789, + serialized_end=2874, +) + + +_CREATESNAPSHOTREQUEST = _descriptor.Descriptor( + name='CreateSnapshotRequest', + full_name='google.pubsub.v1.CreateSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.CreateSnapshotRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.CreateSnapshotRequest.subscription', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2876, + serialized_end=2935, +) + + +_UPDATESNAPSHOTREQUEST = _descriptor.Descriptor( + name='UpdateSnapshotRequest', + full_name='google.pubsub.v1.UpdateSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.UpdateSnapshotRequest.snapshot', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateSnapshotRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2937, + serialized_end=3055, +) + + +_SNAPSHOT_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Snapshot.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Snapshot.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Snapshot.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + +_SNAPSHOT = _descriptor.Descriptor( + name='Snapshot', + full_name='google.pubsub.v1.Snapshot', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Snapshot.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.Snapshot.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='expire_time', full_name='google.pubsub.v1.Snapshot.expire_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Snapshot.labels', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_SNAPSHOT_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3058, + serialized_end=3249, +) + + +_LISTSNAPSHOTSREQUEST = _descriptor.Descriptor( + name='ListSnapshotsRequest', + full_name='google.pubsub.v1.ListSnapshotsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListSnapshotsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListSnapshotsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListSnapshotsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3251, + serialized_end=3329, +) + + +_LISTSNAPSHOTSRESPONSE = _descriptor.Descriptor( + name='ListSnapshotsResponse', + full_name='google.pubsub.v1.ListSnapshotsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshots', full_name='google.pubsub.v1.ListSnapshotsResponse.snapshots', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListSnapshotsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3331, + serialized_end=3426, +) + + +_DELETESNAPSHOTREQUEST = _descriptor.Descriptor( + name='DeleteSnapshotRequest', + full_name='google.pubsub.v1.DeleteSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.DeleteSnapshotRequest.snapshot', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3428, + serialized_end=3469, +) + + +_SEEKREQUEST = _descriptor.Descriptor( + name='SeekRequest', + full_name='google.pubsub.v1.SeekRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.SeekRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='time', full_name='google.pubsub.v1.SeekRequest.time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.SeekRequest.snapshot', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='target', full_name='google.pubsub.v1.SeekRequest.target', + index=0, containing_type=None, fields=[]), + ], + serialized_start=3471, + serialized_end=3580, +) + + +_SEEKRESPONSE = _descriptor.Descriptor( + name='SeekResponse', + full_name='google.pubsub.v1.SeekResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3582, + serialized_end=3596, +) + +_TOPIC_LABELSENTRY.containing_type = _TOPIC +_TOPIC.fields_by_name['labels'].message_type = _TOPIC_LABELSENTRY +_PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE +_PUBSUBMESSAGE.fields_by_name['attributes'].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY +_PUBSUBMESSAGE.fields_by_name['publish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_UPDATETOPICREQUEST.fields_by_name['topic'].message_type = _TOPIC +_UPDATETOPICREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_PUBLISHREQUEST.fields_by_name['messages'].message_type = _PUBSUBMESSAGE +_LISTTOPICSRESPONSE.fields_by_name['topics'].message_type = _TOPIC +_SUBSCRIPTION_LABELSENTRY.containing_type = _SUBSCRIPTION +_SUBSCRIPTION.fields_by_name['push_config'].message_type = _PUSHCONFIG +_SUBSCRIPTION.fields_by_name['message_retention_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_SUBSCRIPTION.fields_by_name['labels'].message_type = _SUBSCRIPTION_LABELSENTRY +_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG +_PUSHCONFIG.fields_by_name['attributes'].message_type = _PUSHCONFIG_ATTRIBUTESENTRY +_RECEIVEDMESSAGE.fields_by_name['message'].message_type = _PUBSUBMESSAGE 
+_UPDATESUBSCRIPTIONREQUEST.fields_by_name['subscription'].message_type = _SUBSCRIPTION +_UPDATESUBSCRIPTIONREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTSUBSCRIPTIONSRESPONSE.fields_by_name['subscriptions'].message_type = _SUBSCRIPTION +_MODIFYPUSHCONFIGREQUEST.fields_by_name['push_config'].message_type = _PUSHCONFIG +_PULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_STREAMINGPULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_UPDATESNAPSHOTREQUEST.fields_by_name['snapshot'].message_type = _SNAPSHOT +_UPDATESNAPSHOTREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_SNAPSHOT_LABELSENTRY.containing_type = _SNAPSHOT +_SNAPSHOT.fields_by_name['expire_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SNAPSHOT.fields_by_name['labels'].message_type = _SNAPSHOT_LABELSENTRY +_LISTSNAPSHOTSRESPONSE.fields_by_name['snapshots'].message_type = _SNAPSHOT +_SEEKREQUEST.fields_by_name['time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SEEKREQUEST.oneofs_by_name['target'].fields.append( + _SEEKREQUEST.fields_by_name['time']) +_SEEKREQUEST.fields_by_name['time'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +_SEEKREQUEST.oneofs_by_name['target'].fields.append( + _SEEKREQUEST.fields_by_name['snapshot']) +_SEEKREQUEST.fields_by_name['snapshot'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +DESCRIPTOR.message_types_by_name['Topic'] = _TOPIC +DESCRIPTOR.message_types_by_name['PubsubMessage'] = _PUBSUBMESSAGE +DESCRIPTOR.message_types_by_name['GetTopicRequest'] = _GETTOPICREQUEST +DESCRIPTOR.message_types_by_name['UpdateTopicRequest'] = _UPDATETOPICREQUEST +DESCRIPTOR.message_types_by_name['PublishRequest'] = _PUBLISHREQUEST +DESCRIPTOR.message_types_by_name['PublishResponse'] = _PUBLISHRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicsRequest'] = _LISTTOPICSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicsResponse'] = _LISTTOPICSRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsRequest'] = _LISTTOPICSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsResponse'] = _LISTTOPICSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteTopicRequest'] = _DELETETOPICREQUEST +DESCRIPTOR.message_types_by_name['Subscription'] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name['PushConfig'] = _PUSHCONFIG +DESCRIPTOR.message_types_by_name['ReceivedMessage'] = _RECEIVEDMESSAGE +DESCRIPTOR.message_types_by_name['GetSubscriptionRequest'] = _GETSUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['UpdateSubscriptionRequest'] = _UPDATESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['ListSubscriptionsRequest'] = _LISTSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name['ListSubscriptionsResponse'] = _LISTSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteSubscriptionRequest'] = _DELETESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['ModifyPushConfigRequest'] = _MODIFYPUSHCONFIGREQUEST +DESCRIPTOR.message_types_by_name['PullRequest'] = _PULLREQUEST +DESCRIPTOR.message_types_by_name['PullResponse'] = _PULLRESPONSE +DESCRIPTOR.message_types_by_name['ModifyAckDeadlineRequest'] = _MODIFYACKDEADLINEREQUEST +DESCRIPTOR.message_types_by_name['AcknowledgeRequest'] = _ACKNOWLEDGEREQUEST +DESCRIPTOR.message_types_by_name['StreamingPullRequest'] = _STREAMINGPULLREQUEST 
+DESCRIPTOR.message_types_by_name['StreamingPullResponse'] = _STREAMINGPULLRESPONSE +DESCRIPTOR.message_types_by_name['CreateSnapshotRequest'] = _CREATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['UpdateSnapshotRequest'] = _UPDATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT +DESCRIPTOR.message_types_by_name['ListSnapshotsRequest'] = _LISTSNAPSHOTSREQUEST +DESCRIPTOR.message_types_by_name['ListSnapshotsResponse'] = _LISTSNAPSHOTSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteSnapshotRequest'] = _DELETESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['SeekRequest'] = _SEEKREQUEST +DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE + +Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _TOPIC_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) + )) + , + DESCRIPTOR = _TOPIC, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A topic resource. + + + Attributes: + name: + The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` must + start with a letter, and contain only letters (``[A-Za-z]``), + numbers (``[0-9]``), dashes (``-``), underscores (``_``), + periods (``.``), tildes (``~``), plus (``+``) or percent signs + (``%``). It must be between 3 and 255 characters in length, + and it must not start with ``"goog"``. + labels: + User labels. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) + )) +_sym_db.RegisterMessage(Topic) +_sym_db.RegisterMessage(Topic.LabelsEntry) + +PubsubMessage = _reflection.GeneratedProtocolMessageType('PubsubMessage', (_message.Message,), dict( + + AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( + DESCRIPTOR = _PUBSUBMESSAGE_ATTRIBUTESENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) + )) + , + DESCRIPTOR = _PUBSUBMESSAGE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A message data and its attributes. The message payload must not be + empty; it must contain either a non-empty data field, or at least one + attribute. + + + Attributes: + data: + The message payload. + attributes: + Optional attributes for this message. + message_id: + ID of this message, assigned by the server when the message is + published. Guaranteed to be unique within the topic. This + value may be read by a subscriber that receives a + ``PubsubMessage`` via a ``Pull`` call or a push delivery. It + must not be populated by the publisher in a ``Publish`` call. + publish_time: + The time at which the message was published, populated by the + server when it receives the ``Publish`` call. It must not be + populated by the publisher in a ``Publish`` call. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) + )) +_sym_db.RegisterMessage(PubsubMessage) +_sym_db.RegisterMessage(PubsubMessage.AttributesEntry) + +GetTopicRequest = _reflection.GeneratedProtocolMessageType('GetTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _GETTOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the GetTopic method. 
+ + + Attributes: + topic: + The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) + )) +_sym_db.RegisterMessage(GetTopicRequest) + +UpdateTopicRequest = _reflection.GeneratedProtocolMessageType('UpdateTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATETOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateTopic method. + + + Attributes: + topic: + The topic to update. + update_mask: + Indicates which fields in the provided topic to update. Must + be specified and non-empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) + )) +_sym_db.RegisterMessage(UpdateTopicRequest) + +PublishRequest = _reflection.GeneratedProtocolMessageType('PublishRequest', (_message.Message,), dict( + DESCRIPTOR = _PUBLISHREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the Publish method. + + + Attributes: + topic: + The messages in the request will be published on this topic. + Format is ``projects/{project}/topics/{topic}``. + messages: + The messages to publish. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) + )) +_sym_db.RegisterMessage(PublishRequest) + +PublishResponse = _reflection.GeneratedProtocolMessageType('PublishResponse', (_message.Message,), dict( + DESCRIPTOR = _PUBLISHRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``Publish`` method. + + + Attributes: + message_ids: + The server-assigned ID of each published message, in the same + order as the messages in the request. IDs are guaranteed to be + unique within the topic. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) + )) +_sym_db.RegisterMessage(PublishResponse) + +ListTopicsRequest = _reflection.GeneratedProtocolMessageType('ListTopicsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopics`` method. + + + Attributes: + project: + The name of the cloud project that topics belong to. Format is + ``projects/{project}``. + page_size: + Maximum number of topics to return. + page_token: + The value returned by the last ``ListTopicsResponse``; + indicates that this is a continuation of a prior + ``ListTopics`` call, and that the system should return the + next page of data. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) + )) +_sym_db.RegisterMessage(ListTopicsRequest) + +ListTopicsResponse = _reflection.GeneratedProtocolMessageType('ListTopicsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopics`` method. + + + Attributes: + topics: + The resulting topics. + next_page_token: + If not empty, indicates that there may be more topics that + match the request; this value should be passed in a new + ``ListTopicsRequest``. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) + )) +_sym_db.RegisterMessage(ListTopicsResponse) + +ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopicSubscriptions`` method. + + + Attributes: + topic: + The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size: + Maximum number of subscription names to return. + page_token: + The value returned by the last + ``ListTopicSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListTopicSubscriptions`` call, and + that the system should return the next page of data. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) + )) +_sym_db.RegisterMessage(ListTopicSubscriptionsRequest) + +ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopicSubscriptions`` method. + + + Attributes: + subscriptions: + The names of the subscriptions that match the request. + next_page_token: + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListTopicSubscriptionsRequest`` to get more subscriptions. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) + )) +_sym_db.RegisterMessage(ListTopicSubscriptionsResponse) + +DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETETOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``DeleteTopic`` method. + + + Attributes: + topic: + Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) + )) +_sym_db.RegisterMessage(DeleteTopicRequest) + +Subscription = _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIPTION_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) + )) + , + DESCRIPTOR = _SUBSCRIPTION, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A subscription resource. + + + Attributes: + name: + The name of the subscription. It must have the format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain only + letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus + (``+``) or percent signs (``%``). It must be between 3 and 255 + characters in length, and it must not start with ``"goog"``. + topic: + The name of the topic from which this subscription is + receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this field + will be ``_deleted-topic_`` if the topic has been deleted. 
+ push_config: + If push delivery is used with this subscription, this field is + used to configure it. An empty ``pushConfig`` signifies that + the subscriber will pull and ack messages using API methods. + ack_deadline_seconds: + This value is the maximum time after a subscriber receives a + message before the subscriber should acknowledge the message. + After message delivery but before the ack deadline expires and + before the message is acknowledged, it is an outstanding + message and will not be delivered again during that time (on a + best-effort basis). For pull subscriptions, this value is + used as the initial value for the ack deadline. To override + this value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using pull. The minimum + custom deadline you can specify is 10 seconds. The maximum + custom deadline you can specify is 600 seconds (10 minutes). + If this parameter is 0, a default value of 10 seconds is used. + For push delivery, this value is also used to set the request + timeout for the call to the push endpoint. If the subscriber + never acknowledges the message, the Pub/Sub system will + eventually redeliver the message. + retain_acked_messages: + Indicates whether to retain acknowledged messages. If true, + then messages are not expunged from the subscription's + backlog, even if they are acknowledged, until they fall out of + the ``message_retention_duration`` window. + message_retention_duration: + How long to retain unacknowledged messages in the + subscription's backlog, from the moment a message is + published. If ``retain_acked_messages`` is true, then this + also configures the retention of acknowledged messages, and + thus configures how far back in time a ``Seek`` can be done. + Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. + labels: + User labels. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) + )) +_sym_db.RegisterMessage(Subscription) +_sym_db.RegisterMessage(Subscription.LabelsEntry) + +PushConfig = _reflection.GeneratedProtocolMessageType('PushConfig', (_message.Message,), dict( + + AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( + DESCRIPTOR = _PUSHCONFIG_ATTRIBUTESENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) + )) + , + DESCRIPTOR = _PUSHCONFIG, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Configuration for a push delivery endpoint. + + + Attributes: + push_endpoint: + A URL locating the endpoint to which messages should be + pushed. For example, a Webhook endpoint might use + "https://example.com/push". + attributes: + Endpoint configuration attributes. Every endpoint has a set + of API supported attributes that can be used to control + different aspects of the message delivery. The currently + supported attribute is ``x-goog-version``, which you can use + to change the format of the pushed message. This attribute + indicates the version of the data expected by the endpoint. + This controls the shape of the pushed message (i.e., its + fields and metadata). The endpoint version is based on the + version of the Pub/Sub API. If not present during the + ``CreateSubscription`` call, it will default to the version of + the API used to make such call. If not present during a + ``ModifyPushConfig`` call, its value will not be changed. 
+ ``GetSubscription`` calls will always return a valid version, + even if the subscription was created without this attribute. + The possible values for this attribute are: - ``v1beta1``: + uses the push format defined in the v1beta1 Pub/Sub API. - + ``v1`` or ``v1beta2``: uses the push format defined in the v1 + Pub/Sub API. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) + )) +_sym_db.RegisterMessage(PushConfig) +_sym_db.RegisterMessage(PushConfig.AttributesEntry) + +ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), dict( + DESCRIPTOR = _RECEIVEDMESSAGE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A message and its corresponding acknowledgment ID. + + + Attributes: + ack_id: + This ID can be used to acknowledge the received message. + message: + The message. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) + )) +_sym_db.RegisterMessage(ReceivedMessage) + +GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType('GetSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _GETSUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the GetSubscription method. + + + Attributes: + subscription: + The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) + )) +_sym_db.RegisterMessage(GetSubscriptionRequest) + +UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType('UpdateSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateSubscription method. + + + Attributes: + subscription: + The updated subscription object. + update_mask: + Indicates which fields in the provided subscription to update. + Must be specified and non-empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) + )) +_sym_db.RegisterMessage(UpdateSubscriptionRequest) + +ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListSubscriptionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSUBSCRIPTIONSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListSubscriptions`` method. + + + Attributes: + project: + The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size: + Maximum number of subscriptions to return. + page_token: + The value returned by the last ``ListSubscriptionsResponse``; + indicates that this is a continuation of a prior + ``ListSubscriptions`` call, and that the system should return + the next page of data. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) + )) +_sym_db.RegisterMessage(ListSubscriptionsRequest) + +ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListSubscriptionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSUBSCRIPTIONSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListSubscriptions`` method. + + + Attributes: + subscriptions: + The subscriptions that match the request. 
+      next_page_token:
+          If not empty, indicates that there may be more subscriptions
+          that match the request; this value should be passed in a new
+          ``ListSubscriptionsRequest`` to get more subscriptions.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse)
+  ))
+_sym_db.RegisterMessage(ListSubscriptionsResponse)
+
+DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType('DeleteSubscriptionRequest', (_message.Message,), dict(
+  DESCRIPTOR = _DELETESUBSCRIPTIONREQUEST,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Request for the DeleteSubscription method.
+
+
+  Attributes:
+      subscription:
+          The subscription to delete. Format is
+          ``projects/{project}/subscriptions/{sub}``.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest)
+  ))
+_sym_db.RegisterMessage(DeleteSubscriptionRequest)
+
+ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyPushConfigRequest', (_message.Message,), dict(
+  DESCRIPTOR = _MODIFYPUSHCONFIGREQUEST,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Request for the ModifyPushConfig method.
+
+
+  Attributes:
+      subscription:
+          The name of the subscription. Format is
+          ``projects/{project}/subscriptions/{sub}``.
+      push_config:
+          The push configuration for future deliveries. An empty
+          ``pushConfig`` indicates that the Pub/Sub system should stop
+          pushing messages from the given subscription and allow
+          messages to be pulled and acknowledged - effectively pausing
+          the subscription if ``Pull`` is not called.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest)
+  ))
+_sym_db.RegisterMessage(ModifyPushConfigRequest)
+
+PullRequest = _reflection.GeneratedProtocolMessageType('PullRequest', (_message.Message,), dict(
+  DESCRIPTOR = _PULLREQUEST,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Request for the ``Pull`` method.
+
+
+  Attributes:
+      subscription:
+          The subscription from which messages should be pulled. Format
+          is ``projects/{project}/subscriptions/{sub}``.
+      return_immediately:
+          If this field is set to true, the system will respond
+          immediately even if there are no messages available to return
+          in the ``Pull`` response. Otherwise, the system may wait (for
+          a bounded amount of time) until at least one message is
+          available, rather than returning no messages. The client may
+          cancel the request if it does not wish to wait any longer for
+          the response.
+      max_messages:
+          The maximum number of messages returned for this request. The
+          Pub/Sub system may return fewer than the number specified.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest)
+  ))
+_sym_db.RegisterMessage(PullRequest)
+
+PullResponse = _reflection.GeneratedProtocolMessageType('PullResponse', (_message.Message,), dict(
+  DESCRIPTOR = _PULLRESPONSE,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Response for the ``Pull`` method.
+
+
+  Attributes:
+      received_messages:
+          Received Pub/Sub messages. The Pub/Sub system will return zero
+          messages if there are no more available in the backlog. The
+          Pub/Sub system may return fewer than the ``maxMessages``
+          requested even if there are more messages available in the
+          backlog.
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) + )) +_sym_db.RegisterMessage(PullResponse) + +ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType('ModifyAckDeadlineRequest', (_message.Message,), dict( + DESCRIPTOR = _MODIFYACKDEADLINEREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ModifyAckDeadline method. + + + Attributes: + subscription: + The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids: + List of acknowledgment IDs. + ack_deadline_seconds: + The new ack deadline with respect to the time this request was + sent to the Pub/Sub system. For example, if the value is 10, + the new ack deadline will expire 10 seconds after the + ``ModifyAckDeadline`` call was made. Specifying zero may + immediately make the message available for another pull + request. The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 + minutes). + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) + )) +_sym_db.RegisterMessage(ModifyAckDeadlineRequest) + +AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), dict( + DESCRIPTOR = _ACKNOWLEDGEREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the Acknowledge method. + + + Attributes: + subscription: + The subscription whose message is being acknowledged. Format + is ``projects/{project}/subscriptions/{sub}``. + ack_ids: + The acknowledgment ID for the messages being acknowledged that + was returned by the Pub/Sub system in the ``Pull`` response. + Must not be empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) + )) +_sym_db.RegisterMessage(AcknowledgeRequest) + +StreamingPullRequest = _reflection.GeneratedProtocolMessageType('StreamingPullRequest', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGPULLREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``StreamingPull`` streaming RPC method. This request is + used to establish the initial stream as well as to stream + acknowledgements and ack deadline modifications from the client to the + server. + + + Attributes: + subscription: + The subscription for which to initialize the new stream. This + must be provided in the first request on the stream, and must + not be set in subsequent requests from client to server. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids: + List of acknowledgement IDs for acknowledging previously + received messages (received on this stream or a different + stream). If an ack ID has expired, the corresponding message + may be redelivered later. Acknowledging a message more than + once will not result in an error. If the acknowledgement ID is + malformed, the stream will be aborted with status + ``INVALID_ARGUMENT``. + modify_deadline_seconds: + The list of new ack deadlines for the IDs listed in + ``modify_deadline_ack_ids``. The size of this list must be the + same as the size of ``modify_deadline_ack_ids``. If it differs + the stream will be aborted with ``INVALID_ARGUMENT``. Each + element in this list is applied to the element in the same + position in ``modify_deadline_ack_ids``. The new ack deadline + is with respect to the time this request was sent to the + Pub/Sub system. Must be >= 0. 
For example, if the value is 10,
+          the new ack deadline will expire 10 seconds after this request
+          is received. If the value is 0, the message is immediately
+          made available for another streaming or non-streaming pull
+          request. If the value is < 0 (an error), the stream will be
+          aborted with status ``INVALID_ARGUMENT``.
+      modify_deadline_ack_ids:
+          List of acknowledgement IDs whose deadline will be modified
+          based on the corresponding element in
+          ``modify_deadline_seconds``. This field can be used to
+          indicate that more time is needed to process a message by the
+          subscriber, or to make the message available for redelivery if
+          the processing was interrupted.
+      stream_ack_deadline_seconds:
+          The ack deadline to use for the stream. This must be provided
+          in the first request on the stream, but it can also be updated
+          on subsequent requests from client to server. The minimum
+          deadline you can specify is 10 seconds. The maximum deadline
+          you can specify is 600 seconds (10 minutes).
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest)
+  ))
+_sym_db.RegisterMessage(StreamingPullRequest)
+
+StreamingPullResponse = _reflection.GeneratedProtocolMessageType('StreamingPullResponse', (_message.Message,), dict(
+  DESCRIPTOR = _STREAMINGPULLRESPONSE,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Response for the ``StreamingPull`` method. This response is used to
+  stream messages from the server to the client.
+
+
+  Attributes:
+      received_messages:
+          Received Pub/Sub messages. This will not be empty.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse)
+  ))
+_sym_db.RegisterMessage(StreamingPullResponse)
+
+CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CREATESNAPSHOTREQUEST,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Request for the ``CreateSnapshot`` method.
+
+
+  Attributes:
+      name:
+          Optional user-provided name for this snapshot. If the name is
+          not provided in the request, the server will assign a random
+          name for this snapshot on the same project as the
+          subscription. Note that for REST API requests, you must
+          specify a name. Format is
+          ``projects/{project}/snapshots/{snap}``.
+      subscription:
+          The subscription whose backlog the snapshot retains.
+          Specifically, the created snapshot is guaranteed to retain:
+          (a) The existing backlog on the subscription. More precisely,
+          this is defined as the messages in the subscription's backlog
+          that are unacknowledged upon the successful completion of the
+          ``CreateSnapshot`` request; as well as: (b) Any messages
+          published to the subscription's topic following the successful
+          completion of the CreateSnapshot request. Format is
+          ``projects/{project}/subscriptions/{sub}``.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest)
+  ))
+_sym_db.RegisterMessage(CreateSnapshotRequest)
+
+UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType('UpdateSnapshotRequest', (_message.Message,), dict(
+  DESCRIPTOR = _UPDATESNAPSHOTREQUEST,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Request for the UpdateSnapshot method.
+
+
+  Attributes:
+      snapshot:
+          The updated snapshot object.
+      update_mask:
+          Indicates which fields in the provided snapshot to update.
+          Must be specified and non-empty.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest)
+  ))
+_sym_db.RegisterMessage(UpdateSnapshotRequest)
+
+Snapshot = _reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict(
+
+  LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict(
+    DESCRIPTOR = _SNAPSHOT_LABELSENTRY,
+    __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+    # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry)
+    ))
+  ,
+  DESCRIPTOR = _SNAPSHOT,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """A snapshot resource.
+
+
+  Attributes:
+      name:
+          The name of the snapshot.
+      topic:
+          The name of the topic from which this snapshot is retaining
+          messages.
+      expire_time:
+          The snapshot is guaranteed to exist up until this time. A
+          newly-created snapshot expires no later than 7 days from the
+          time of its creation. Its exact lifetime is determined at
+          creation by the existing backlog in the source subscription.
+          Specifically, the lifetime of the snapshot is ``7 days - (age
+          of oldest unacked message in the subscription)``. For example,
+          consider a subscription whose oldest unacked message is 3 days
+          old. If a snapshot is created from this subscription, the
+          snapshot -- which will always capture this 3-day-old backlog
+          as long as the snapshot exists -- will expire in 4 days.
+      labels:
+          User labels.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot)
+  ))
+_sym_db.RegisterMessage(Snapshot)
+_sym_db.RegisterMessage(Snapshot.LabelsEntry)
+
+ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict(
+  DESCRIPTOR = _LISTSNAPSHOTSREQUEST,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Request for the ``ListSnapshots`` method.
+
+
+  Attributes:
+      project:
+          The name of the cloud project that snapshots belong to. Format
+          is ``projects/{project}``.
+      page_size:
+          Maximum number of snapshots to return.
+      page_token:
+          The value returned by the last ``ListSnapshotsResponse``;
+          indicates that this is a continuation of a prior
+          ``ListSnapshots`` call, and that the system should return the
+          next page of data.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest)
+  ))
+_sym_db.RegisterMessage(ListSnapshotsRequest)
+
+ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListSnapshotsResponse', (_message.Message,), dict(
+  DESCRIPTOR = _LISTSNAPSHOTSRESPONSE,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Response for the ``ListSnapshots`` method.
+
+
+  Attributes:
+      snapshots:
+          The resulting snapshots.
+      next_page_token:
+          If not empty, indicates that there may be more snapshots that
+          match the request; this value should be passed in a new
+          ``ListSnapshotsRequest``.
+  """,
+  # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse)
+  ))
+_sym_db.RegisterMessage(ListSnapshotsResponse)
+
+DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType('DeleteSnapshotRequest', (_message.Message,), dict(
+  DESCRIPTOR = _DELETESNAPSHOTREQUEST,
+  __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2'
+  ,
+  __doc__ = """Request for the ``DeleteSnapshot`` method.
+
+
+  Attributes:
+      snapshot:
+          The name of the snapshot to delete. Format is
+          ``projects/{project}/snapshots/{snap}``.
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) + )) +_sym_db.RegisterMessage(DeleteSnapshotRequest) + +SeekRequest = _reflection.GeneratedProtocolMessageType('SeekRequest', (_message.Message,), dict( + DESCRIPTOR = _SEEKREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``Seek`` method. + + + Attributes: + subscription: + The subscription to affect. + time: + The time to seek to. Messages retained in the subscription + that were published before this time are marked as + acknowledged, and messages retained in the subscription that + were published after this time are marked as unacknowledged. + Note that this operation affects only those messages retained + in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). + For example, if ``time`` corresponds to a point before the + message retention window (or to a point before the system's + notion of the subscription creation time), only retained + messages will be marked as unacknowledged, and already- + expunged messages will not be restored. + snapshot: + The snapshot to seek to. The snapshot's topic must be the same + as that of the provided subscription. Format is + ``projects/{project}/snapshots/{snap}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) + )) +_sym_db.RegisterMessage(SeekRequest) + +SeekResponse = _reflection.GeneratedProtocolMessageType('SeekResponse', (_message.Message,), dict( + DESCRIPTOR = _SEEKRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) + )) +_sym_db.RegisterMessage(SeekResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1')) +_TOPIC_LABELSENTRY.has_options = True +_TOPIC_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True +_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SUBSCRIPTION_LABELSENTRY.has_options = True +_SUBSCRIPTION_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_PUSHCONFIG_ATTRIBUTESENTRY.has_options = True +_PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SNAPSHOT_LABELSENTRY.has_options = True +_SNAPSHOT_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class SubscriberStub(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=Subscription.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.GetSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=GetSubscriptionRequest.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.UpdateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=UpdateSubscriptionRequest.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.ListSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=ListSubscriptionsRequest.SerializeToString, + response_deserializer=ListSubscriptionsResponse.FromString, + ) + self.DeleteSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyAckDeadline = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Acknowledge = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Pull = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=PullRequest.SerializeToString, + response_deserializer=PullResponse.FromString, + ) + self.StreamingPull = channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=StreamingPullRequest.SerializeToString, + response_deserializer=StreamingPullResponse.FromString, + ) + self.ModifyPushConfig = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListSnapshots = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=ListSnapshotsRequest.SerializeToString, + response_deserializer=ListSnapshotsResponse.FromString, + ) + self.CreateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=CreateSnapshotRequest.SerializeToString, + response_deserializer=Snapshot.FromString, + ) + self.UpdateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=UpdateSnapshotRequest.SerializeToString, + response_deserializer=Snapshot.FromString, + ) + self.DeleteSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + request_serializer=DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Seek = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=SeekRequest.SerializeToString, + response_deserializer=SeekResponse.FromString, + ) + + + class SubscriberServicer(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. 
+ """ + + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. 
See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_SubscriberServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=Subscription.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'GetSubscription': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=GetSubscriptionRequest.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=UpdateSubscriptionRequest.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=ListSubscriptionsRequest.FromString, + response_serializer=ListSubscriptionsResponse.SerializeToString, + ), + 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + request_deserializer=ModifyAckDeadlineRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Acknowledge': grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + request_deserializer=AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Pull': grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=PullRequest.FromString, + response_serializer=PullResponse.SerializeToString, + ), + 'StreamingPull': grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=StreamingPullRequest.FromString, + response_serializer=StreamingPullResponse.SerializeToString, + ), + 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + request_deserializer=ListSnapshotsRequest.FromString, + response_serializer=ListSnapshotsResponse.SerializeToString, + ), + 'CreateSnapshot': 
grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=CreateSnapshotRequest.FromString, + response_serializer=Snapshot.SerializeToString, + ), + 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + request_deserializer=UpdateSnapshotRequest.FromString, + response_serializer=Snapshot.SerializeToString, + ), + 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Seek': grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=SeekRequest.FromString, + response_serializer=SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Subscriber', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class PublisherStub(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=Topic.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=UpdateTopicRequest.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=PublishRequest.SerializeToString, + response_deserializer=PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=GetTopicRequest.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=ListTopicsRequest.SerializeToString, + response_deserializer=ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=ListTopicSubscriptionsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class PublisherServicer(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=Topic.FromString, + response_serializer=Topic.SerializeToString, + ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=UpdateTopicRequest.FromString, + response_serializer=Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=PublishRequest.FromString, + response_serializer=PublishResponse.SerializeToString, + ), + 'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=GetTopicRequest.FromString, + response_serializer=Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=ListTopicsRequest.FromString, + response_serializer=ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=ListTopicSubscriptionsRequest.FromString, + response_serializer=ListTopicSubscriptionsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + request_deserializer=DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaSubscriberServicer(object): 
+ """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. 
The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaSubscriberStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + def CreateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + raise NotImplementedError() + CreateSubscription.future = None + def GetSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the configuration details of a subscription. + """ + raise NotImplementedError() + GetSubscription.future = None + def UpdateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateSubscription.future = None + def ListSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists matching subscriptions. + """ + raise NotImplementedError() + ListSubscriptions.future = None + def DeleteSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + raise NotImplementedError() + DeleteSubscription.future = None + def ModifyAckDeadline(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + raise NotImplementedError() + ModifyAckDeadline.future = None + def Acknowledge(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. 
The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + raise NotImplementedError() + Acknowledge.future = None + def Pull(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + raise NotImplementedError() + Pull.future = None + def StreamingPull(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + raise NotImplementedError() + def ModifyPushConfig(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + raise NotImplementedError() + ModifyPushConfig.future = None + def ListSnapshots(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the existing snapshots. + """ + raise NotImplementedError() + ListSnapshots.future = None + def CreateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + raise NotImplementedError() + CreateSnapshot.future = None + def UpdateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. 
See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateSnapshot.future = None + def DeleteSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + raise NotImplementedError() + DeleteSnapshot.future = None + def Seek(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + raise NotImplementedError() + Seek.future = None + + + def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.FromString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): ModifyPushConfigRequest.FromString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.FromString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.FromString, + } + response_serializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 
'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.SerializeToString, + } + method_implementations = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): face_utilities.unary_unary_inline(servicer.Acknowledge), + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): face_utilities.unary_unary_inline(servicer.CreateSnapshot), + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): face_utilities.unary_unary_inline(servicer.CreateSubscription), + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): face_utilities.unary_unary_inline(servicer.DeleteSnapshot), + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): face_utilities.unary_unary_inline(servicer.DeleteSubscription), + ('google.pubsub.v1.Subscriber', 'GetSubscription'): face_utilities.unary_unary_inline(servicer.GetSubscription), + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): face_utilities.unary_unary_inline(servicer.ListSnapshots), + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): face_utilities.unary_unary_inline(servicer.ListSubscriptions), + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): face_utilities.unary_unary_inline(servicer.ModifyAckDeadline), + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): face_utilities.unary_unary_inline(servicer.ModifyPushConfig), + ('google.pubsub.v1.Subscriber', 'Pull'): face_utilities.unary_unary_inline(servicer.Pull), + ('google.pubsub.v1.Subscriber', 'Seek'): face_utilities.unary_unary_inline(servicer.Seek), + ('google.pubsub.v1.Subscriber', 'StreamingPull'): face_utilities.stream_stream_inline(servicer.StreamingPull), + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): face_utilities.unary_unary_inline(servicer.UpdateSnapshot), + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): face_utilities.unary_unary_inline(servicer.UpdateSubscription), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): ModifyPushConfigRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.SerializeToString, + } + response_deserializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsResponse.FromString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.FromString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.FromString, + } + cardinalities = { + 'Acknowledge': cardinality.Cardinality.UNARY_UNARY, + 'CreateSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'CreateSubscription': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSubscription': cardinality.Cardinality.UNARY_UNARY, + 'GetSubscription': cardinality.Cardinality.UNARY_UNARY, + 'ListSnapshots': cardinality.Cardinality.UNARY_UNARY, + 'ListSubscriptions': cardinality.Cardinality.UNARY_UNARY, + 'ModifyAckDeadline': cardinality.Cardinality.UNARY_UNARY, + 'ModifyPushConfig': cardinality.Cardinality.UNARY_UNARY, + 'Pull': cardinality.Cardinality.UNARY_UNARY, + 'Seek': 
cardinality.Cardinality.UNARY_UNARY, + 'StreamingPull': cardinality.Cardinality.STREAM_STREAM, + 'UpdateSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'UpdateSubscription': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Subscriber', cardinalities, options=stub_options) + + + class BetaPublisherServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaPublisherStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + def CreateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates the given topic with the given name. 
+ """ + raise NotImplementedError() + CreateTopic.future = None + def UpdateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateTopic.future = None + def Publish(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + raise NotImplementedError() + Publish.future = None + def GetTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the configuration of a topic. + """ + raise NotImplementedError() + GetTopic.future = None + def ListTopics(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists matching topics. + """ + raise NotImplementedError() + ListTopics.future = None + def ListTopicSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the name of the subscriptions for this topic. + """ + raise NotImplementedError() + ListTopicSubscriptions.future = None + def DeleteTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + raise NotImplementedError() + DeleteTopic.future = None + + + def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.FromString, + ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.FromString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.FromString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.FromString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.FromString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.FromString, + } + response_serializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.SerializeToString, + } + method_implementations = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): face_utilities.unary_unary_inline(servicer.CreateTopic), + ('google.pubsub.v1.Publisher', 'DeleteTopic'): face_utilities.unary_unary_inline(servicer.DeleteTopic), + ('google.pubsub.v1.Publisher', 'GetTopic'): face_utilities.unary_unary_inline(servicer.GetTopic), + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): face_utilities.unary_unary_inline(servicer.ListTopicSubscriptions), + ('google.pubsub.v1.Publisher', 'ListTopics'): face_utilities.unary_unary_inline(servicer.ListTopics), + ('google.pubsub.v1.Publisher', 'Publish'): face_utilities.unary_unary_inline(servicer.Publish), + ('google.pubsub.v1.Publisher', 'UpdateTopic'): face_utilities.unary_unary_inline(servicer.UpdateTopic), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.SerializeToString, + } + response_deserializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.FromString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.FromString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.FromString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.FromString, + } + cardinalities = { + 'CreateTopic': cardinality.Cardinality.UNARY_UNARY, + 'DeleteTopic': cardinality.Cardinality.UNARY_UNARY, + 'GetTopic': cardinality.Cardinality.UNARY_UNARY, + 'ListTopicSubscriptions': cardinality.Cardinality.UNARY_UNARY, + 'ListTopics': cardinality.Cardinality.UNARY_UNARY, + 'Publish': cardinality.Cardinality.UNARY_UNARY, + 'UpdateTopic': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Publisher', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py new file mode 100644 index 000000000000..06dd470470d8 --- /dev/null +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py @@ -0,0 +1,509 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.pubsub.v1.pubsub_pb2 as google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class SubscriberStub(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.GetSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.UpdateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.ListSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, + ) + self.DeleteSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyAckDeadline = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Acknowledge = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Pull = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.FromString, + ) + self.StreamingPull = channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.FromString, + ) + self.ModifyPushConfig = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListSnapshots = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.FromString, + ) + 
self.CreateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + ) + self.UpdateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + ) + self.DeleteSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Seek = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.FromString, + ) + + +class SubscriberServicer(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. 
After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SubscriberServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'GetSubscription': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, + ), + 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Acknowledge': grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Pull': grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.SerializeToString, + ), + 'StreamingPull': grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, + ), + 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, + ), + 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Seek': grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Subscriber', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class PublisherStub(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class PublisherServicer(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.SerializeToString, + ), + 'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/pubsub/google/cloud/pubsub.py b/pubsub/google/cloud/pubsub.py new file mode 100644 index 000000000000..bf094f6cf03a --- /dev/null +++ b/pubsub/google/cloud/pubsub.py @@ -0,0 +1,26 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import PublisherClient +from google.cloud.pubsub_v1 import SubscriberClient +from google.cloud.pubsub_v1 import types + + +__all__ = ( + 'PublisherClient', + 'SubscriberClient', + 'types', +) diff --git a/pubsub/google/cloud/pubsub/__init__.py b/pubsub/google/cloud/pubsub/__init__.py deleted file mode 100644 index 070e8243bf2b..000000000000 --- a/pubsub/google/cloud/pubsub/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Google Cloud Pubsub API wrapper. - -The main concepts with this API are: - -- :class:`~google.cloud.pubsub.topic.Topic` represents an endpoint to which - messages can be published using the Cloud Storage Pubsub API. - -- :class:`~google.cloud.pubsub.subscription.Subscription` represents a named - subscription (either pull or push) to a topic. -""" - - -from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-pubsub').version - -from google.cloud.pubsub.client import Client -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -__all__ = ['__version__', 'Client', 'Subscription', 'Topic'] diff --git a/pubsub/google/cloud/pubsub/_gax.py b/pubsub/google/cloud/pubsub/_gax.py deleted file mode 100644 index 35e56717b3c2..000000000000 --- a/pubsub/google/cloud/pubsub/_gax.py +++ /dev/null @@ -1,802 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
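# Illustrative sketch: the new google/cloud/pubsub.py module above is a thin
# shim that only re-exports the pubsub_v1 clients, replacing the handwritten
# Client/Topic/Subscription surface and the GAX layer that this change
# deletes. Assuming placeholder resource names, a minimal migration of a
# publish call looks roughly like this.
from google.cloud import pubsub

# Before: client = pubsub.Client(); topic = client.topic('my-topic')
#         topic.create(); topic.publish(b'payload', origin='example')
publisher = pubsub.PublisherClient()
topic_path = 'projects/my-project/topics/my-topic'  # placeholder
publisher.create_topic(topic_path)
publisher.publish(topic_path, b'payload', origin='example')
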
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""GAX wrapper for Pubsub API requests.""" - -import functools - -from google.api.core import page_iterator -from google.cloud.gapic.pubsub.v1.publisher_client import PublisherClient -from google.cloud.gapic.pubsub.v1.subscriber_client import SubscriberClient -from google.gax import CallOptions -from google.gax import INITIAL_PAGE -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from google.protobuf.json_format import MessageToDict -from google.cloud.proto.pubsub.v1.pubsub_pb2 import PubsubMessage -from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig -from grpc import insecure_channel -from grpc import StatusCode - -from google.cloud._helpers import _to_bytes -from google.cloud._helpers import _pb_timestamp_to_rfc3339 -from google.cloud._helpers import _timedelta_to_duration_pb -from google.cloud._helpers import make_secure_channel -from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud.exceptions import Conflict -from google.cloud.exceptions import NotFound -from google.cloud.pubsub import __version__ -from google.cloud.pubsub._helpers import subscription_name_from_path -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -_CONFLICT_ERROR_CODES = ( - StatusCode.FAILED_PRECONDITION, StatusCode.ALREADY_EXISTS) - - -class _PublisherAPI(object): - """Helper mapping publisher-related APIs. - - :type gax_api: :class:`.publisher_client.PublisherClient` - :param gax_api: API object used to make GAX requests. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gax_api, client): - self._gax_api = gax_api - self._client = client - - def list_topics(self, project, page_size=0, page_token=None): - """List topics for the project associated with this API. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_topics( - path, page_size=page_size, options=options) - return page_iterator._GAXIterator( - self._client, page_iter, _item_to_topic) - - def topic_create(self, topic_path): - """API call: create a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - :type topic_path: str - :param topic_path: fully-qualified path of the new topic, in format - ``projects//topics/``. 
- - :rtype: dict - :returns: ``Topic`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.Conflict` if the topic already - exists - """ - try: - topic_pb = self._gax_api.create_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: - raise Conflict(topic_path) - raise - return {'name': topic_pb.name} - - def topic_get(self, topic_path): - """API call: retrieve a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist - """ - try: - topic_pb = self._gax_api.get_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - return {'name': topic_pb.name} - - def topic_delete(self, topic_path): - """API call: delete a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - """ - try: - self._gax_api.delete_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - - def topic_publish(self, topic_path, messages, timeout=30): - """API call: publish one or more messages to a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - - :type messages: list of dict - :param messages: messages to be published. - - :type timeout: int - :param timeout: (Optional) Timeout seconds. - - :rtype: list of string - :returns: list of opaque IDs for published messages. - :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist - """ - options = CallOptions(is_bundling=False, timeout=timeout) - message_pbs = [_message_pb_from_mapping(message) - for message in messages] - try: - result = self._gax_api.publish(topic_path, message_pbs, - options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - return result.message_ids - - def topic_list_subscriptions(self, topic, page_size=0, page_token=None): - """API call: list subscriptions bound to a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: The topic that owns the subscriptions. - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - :raises: :exc:`~google.cloud.exceptions.NotFound` if the topic does - not exist. 
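The removed GAX ``_PublisherAPI`` methods above all follow the same error-translation pattern: call the generated client, inspect the ``GaxError`` cause, and re-raise a ``google.cloud`` exception. A condensed sketch of that pattern follows; the standalone function name is illustrative only.

.. code-block:: python

    # Condensed form of the GaxError -> google.cloud exception mapping used
    # by the removed topic_* methods above.
    from google.gax.errors import GaxError
    from google.gax.grpc import exc_to_code
    from grpc import StatusCode

    from google.cloud.exceptions import NotFound

    def get_topic_or_raise(gax_api, topic_path):  # illustrative name
        try:
            topic_pb = gax_api.get_topic(topic_path)
        except GaxError as exc:
            if exc_to_code(exc.cause) == StatusCode.NOT_FOUND:
                raise NotFound(topic_path)
            raise  # re-raise anything that is not a missing-topic error
        return {'name': topic_pb.name}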
- """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - topic_path = topic.full_name - try: - page_iter = self._gax_api.list_topic_subscriptions( - topic_path, page_size=page_size, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - - iterator = page_iterator._GAXIterator( - self._client, page_iter, _item_to_subscription_for_topic) - iterator.topic = topic - return iterator - - -class _SubscriberAPI(object): - """Helper mapping subscriber-related APIs. - - :type gax_api: :class:`.publisher_client.SubscriberClient` - :param gax_api: API object used to make GAX requests. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns this API object. - """ - def __init__(self, gax_api, client): - self._gax_api = gax_api - self._client = client - - def list_subscriptions(self, project, page_size=0, page_token=None): - """List subscriptions for the project associated with this API. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_subscriptions( - path, page_size=page_size, options=options) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Subscription.from_api_repr, they - # can be re-used by other subscriptions from the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_sub_for_client, topics=topics) - return page_iterator._GAXIterator( - self._client, page_iter, item_to_value) - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """API call: create a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic being - subscribed, in format - ``projects//topics/``. - - :type ack_deadline: int - :param ack_deadline: - (Optional) the deadline (in seconds) by which messages pulled from - the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. 
- - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - if push_endpoint is not None: - push_config = PushConfig(push_endpoint=push_endpoint) - else: - push_config = None - - if message_retention_duration is not None: - message_retention_duration = _timedelta_to_duration_pb( - message_retention_duration) - - try: - sub_pb = self._gax_api.create_subscription( - subscription_path, topic_path, - push_config=push_config, ack_deadline_seconds=ack_deadline, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - except GaxError as exc: - if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: - raise Conflict(topic_path) - raise - return MessageToDict(sub_pb) - - def subscription_get(self, subscription_path): - """API call: retrieve a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - try: - sub_pb = self._gax_api.get_subscription(subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - return MessageToDict(sub_pb) - - def subscription_delete(self, subscription_path): - """API call: delete a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - """ - try: - self._gax_api.delete_subscription(subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_modify_push_config(self, subscription_path, - push_endpoint): - """API call: update push config of a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - """ - push_config = PushConfig(push_endpoint=push_endpoint) - try: - self._gax_api.modify_push_config(subscription_path, push_config) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_pull(self, subscription_path, return_immediately=False, - max_messages=1): - """API call: retrieve messages for a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to pull from, in - format ``projects//subscriptions/``. 
- - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :rtype: list of dict - :returns: the ``receivedMessages`` element of the response. - """ - try: - response_pb = self._gax_api.pull( - subscription_path, max_messages, - return_immediately=return_immediately) - except GaxError as exc: - code = exc_to_code(exc.cause) - if code == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - elif code == StatusCode.DEADLINE_EXCEEDED: - # NOTE: The JSON-over-HTTP API returns a 200 with an empty - # response when ``return_immediately`` is ``False``, so - # we "mutate" the gRPC error into a non-error to conform. - if not return_immediately: - return [] - raise - return [_received_message_pb_to_mapping(rmpb) - for rmpb in response_pb.received_messages] - - def subscription_acknowledge(self, subscription_path, ack_ids): - """API call: acknowledge retrieved messages - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - """ - try: - self._gax_api.acknowledge(subscription_path, ack_ids) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - """API call: update ack deadline for retrieved messages - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - """ - try: - self._gax_api.modify_ack_deadline( - subscription_path, ack_ids, ack_deadline) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - """API call: seek a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type subscription_path: str - :param subscription_path:: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type time: :class:`.timestamp_pb2.Timestamp` - :param time: The time to seek to. - - :type snapshot: str - :param snapshot: The snapshot to seek to. - """ - try: - self._gax_api.seek(subscription_path, time=time, snapshot=snapshot) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def list_snapshots(self, project, page_size=0, page_token=None): - """List snapshots for the project associated with this API. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_snapshots( - path, page_size=page_size, options=options) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Snapshot.from_api_repr, they - # can be re-used by other snapshots of the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_snapshot_for_client, topics=topics) - return page_iterator._GAXIterator( - self._client, page_iter, item_to_value) - - def snapshot_create(self, snapshot_path, subscription_path): - """API call: create a snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :type subscription_path: str - :param subscription_path: fully-qualified path of the subscrption that - the new snapshot captures, in format - ``projects//subscription/``. - - :rtype: dict - :returns: ``Snapshot`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.Conflict` if the snapshot - already exists - :raises: :exc:`google.cloud.exceptions.NotFound` if the subscription - does not exist - """ - try: - snapshot_pb = self._gax_api.create_snapshot( - snapshot_path, subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: - raise Conflict(snapshot_path) - elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - return MessageToDict(snapshot_pb) - - def snapshot_delete(self, snapshot_path): - """API call: delete a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :raises: :exc:`google.cloud.exceptions.NotFound` if the snapshot does - not exist - """ - try: - self._gax_api.delete_snapshot(snapshot_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(snapshot_path) - raise - - -def _message_pb_from_mapping(message): - """Helper for :meth:`_PublisherAPI.topic_publish`. - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return PubsubMessage(data=_to_bytes(message['data']), - attributes=message['attributes']) - - -def _message_pb_to_mapping(message_pb): - """Helper for :meth:`pull`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. 
- """ - return { - 'messageId': message_pb.message_id, - 'data': message_pb.data, - 'attributes': message_pb.attributes, - 'publishTime': _pb_timestamp_to_rfc3339(message_pb.publish_time), - } - - -def _received_message_pb_to_mapping(received_message_pb): - """Helper for :meth:`pull`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return { - 'ackId': received_message_pb.ack_id, - 'message': _message_pb_to_mapping( - received_message_pb.message), - } - - -def make_gax_publisher_api(credentials=None, host=None): - """Create an instance of the GAX Publisher API. - - If the ``credentials`` are omitted, then we create an insecure - ``channel`` pointing at the local Pub / Sub emulator. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) Credentials for getting access - tokens. - - :type host: str - :param host: (Optional) The host for an insecure channel. Only - used if ``credentials`` are omitted. - - :rtype: :class:`.publisher_client.PublisherClient` - :returns: A publisher API instance with the proper channel. - """ - if credentials is None: - channel = insecure_channel(host) - else: - channel = make_secure_channel( - credentials, DEFAULT_USER_AGENT, - PublisherClient.SERVICE_ADDRESS) - return PublisherClient( - channel=channel, lib_name='gccl', lib_version=__version__) - - -def make_gax_subscriber_api(credentials=None, host=None): - """Create an instance of the GAX Subscriber API. - - If the ``credentials`` are omitted, then we create an insecure - ``channel`` pointing at the local Pub / Sub emulator. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) Credentials for getting access - tokens. - - :type host: str - :param host: (Optional) The host for an insecure channel. Only - used if ``credentials`` are omitted. - - :rtype: :class:`.subscriber_client.SubscriberClient` - :returns: A subscriber API instance with the proper channel. - """ - if credentials is None: - channel = insecure_channel(host) - else: - channel = make_secure_channel( - credentials, DEFAULT_USER_AGENT, - SubscriberClient.SERVICE_ADDRESS) - return SubscriberClient( - channel=channel, lib_name='gccl', lib_version=__version__) - - -def _item_to_topic(iterator, resource): - """Convert a protobuf topic to the native object. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: :class:`.pubsub_pb2.Topic` - :param resource: A topic returned from the API. - - :rtype: :class:`~google.cloud.pubsub.topic.Topic` - :returns: The next topic in the page. - """ - return Topic.from_api_repr( - {'name': resource.name}, iterator.client) - - -def _item_to_subscription_for_topic(iterator, subscription_path): - """Convert a subscription name to the native object. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type subscription_path: str - :param subscription_path: Subscription path returned from the API. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - subscription_name = subscription_name_from_path( - subscription_path, iterator.client.project) - return Subscription(subscription_name, iterator.topic) - - -def _item_to_sub_for_client(iterator, sub_pb, topics): - """Convert a subscription protobuf to the native object. - - .. 
note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_subscriptions`. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type sub_pb: :class:`.pubsub_pb2.Subscription` - :param sub_pb: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - resource = MessageToDict(sub_pb) - return Subscription.from_api_repr( - resource, iterator.client, topics=topics) - - -def _item_to_snapshot_for_client(iterator, snapshot_pb, topics): - """Convert a subscription protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_snapshots`. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type sub_pb: :class:`.pubsub_pb2.Snapshot` - :param sub_pb: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - resource = MessageToDict(snapshot_pb) - return Snapshot.from_api_repr( - resource, iterator.client, topics=topics) diff --git a/pubsub/google/cloud/pubsub/_helpers.py b/pubsub/google/cloud/pubsub/_helpers.py deleted file mode 100644 index 2f021f20ab3e..000000000000 --- a/pubsub/google/cloud/pubsub/_helpers.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Helper functions for shared behavior.""" - -import re - -from google.cloud._helpers import _name_from_project_path - - -_TOPIC_TEMPLATE = re.compile(r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /topics/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) - - -_SUBSCRIPTION_TEMPLATE = re.compile(r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /subscriptions/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) - - -def topic_name_from_path(path, project): - """Validate a topic URI path and get the topic name. - - :type path: str - :param path: URI path for a topic API request. - - :type project: str - :param project: The project associated with the request. It is - included for validation purposes. - - :rtype: str - :returns: Topic name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. - """ - return _name_from_project_path(path, project, _TOPIC_TEMPLATE) - - -def subscription_name_from_path(path, project): - """Validate a subscription URI path and get the subscription name. - - :type path: str - :param path: URI path for a subscription API request. - - :type project: str - :param project: The project associated with the request. It is - included for validation purposes. - - :rtype: str - :returns: subscription name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. - """ - return _name_from_project_path(path, project, _SUBSCRIPTION_TEMPLATE) diff --git a/pubsub/google/cloud/pubsub/_http.py b/pubsub/google/cloud/pubsub/_http.py deleted file mode 100644 index 5173b4095ca8..000000000000 --- a/pubsub/google/cloud/pubsub/_http.py +++ /dev/null @@ -1,797 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Interact with Google Cloud Pub/Sub via JSON-over-HTTP.""" - -import base64 -import copy -import functools -import os - -from google.api.core import page_iterator -from google.cloud import _http -from google.cloud._helpers import _timedelta_to_duration_pb -from google.cloud.environment_vars import PUBSUB_EMULATOR - -from google.cloud.pubsub import __version__ -from google.cloud.pubsub._helpers import subscription_name_from_path -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - - -PUBSUB_API_HOST = 'pubsub.googleapis.com' -"""Pub / Sub API request host.""" - -_CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) - - -class Connection(_http.JSONConnection): - """A connection to Google Cloud Pub/Sub via the JSON REST API. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns the current connection. 
- """ - - API_BASE_URL = 'https://' + PUBSUB_API_HOST - """The base of the API call URL.""" - - API_VERSION = 'v1' - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' - """A template for the URL of a particular API call.""" - - _EXTRA_HEADERS = { - _http.CLIENT_INFO_HEADER: _CLIENT_INFO, - } - - def __init__(self, client): - super(Connection, self).__init__(client) - emulator_host = os.getenv(PUBSUB_EMULATOR) - if emulator_host is None: - self.host = self.__class__.API_BASE_URL - self.api_base_url = self.__class__.API_BASE_URL - self.in_emulator = False - else: - self.host = emulator_host - self.api_base_url = 'http://' + emulator_host - self.in_emulator = True - - def build_api_url(self, path, query_params=None, - api_base_url=None, api_version=None): - """Construct an API url given a few components, some optional. - - Typically, you shouldn't need to use this method. - - :type path: str - :param path: The path to the resource. - - :type query_params: dict or list - :param query_params: A dictionary of keys and values (or list of - key-value pairs) to insert into the query - string of the URL. - - :type api_base_url: str - :param api_base_url: The base URL for the API endpoint. - Typically you won't have to provide this. - - :type api_version: str - :param api_version: The version of the API to call. - Typically you shouldn't provide this and instead - use the default for the library. - - :rtype: str - :returns: The URL assembled from the pieces provided. - """ - if api_base_url is None: - api_base_url = self.api_base_url - return super(Connection, self.__class__).build_api_url( - path, query_params=query_params, - api_base_url=api_base_url, api_version=api_version) - - -class _PublisherAPI(object): - """Helper mapping publisher-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_topics(self, project, page_size=None, page_token=None): - """API call: list topics for a given project - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current client. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/topics' % (project,) - - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=_item_to_topic, - items_key='topics', - page_token=page_token, - extra_params=extra_params) - - def topic_create(self, topic_path): - """API call: create a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - :type topic_path: str - :param topic_path: the fully-qualified path of the new topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. 
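The removed ``Connection`` class above switches between the public endpoint and a local emulator solely on the basis of the ``PUBSUB_EMULATOR`` environment variable; a small sketch of that behavior is below (the ``localhost:8085`` address is an assumed example, not part of this change).

.. code-block:: python

    # Sketch of the endpoint selection performed in Connection.__init__ above.
    import os

    from google.cloud.environment_vars import PUBSUB_EMULATOR

    os.environ[PUBSUB_EMULATOR] = 'localhost:8085'  # assumed emulator address
    # With the variable set, the removed Connection used
    # 'http://localhost:8085' as api_base_url and marked in_emulator=True;
    # without it, it used 'https://pubsub.googleapis.com'.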
- """ - return self.api_request(method='PUT', path='/%s' % (topic_path,)) - - def topic_get(self, topic_path): - """API call: retrieve a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - """ - return self.api_request(method='GET', path='/%s' % (topic_path,)) - - def topic_delete(self, topic_path): - """API call: delete a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - """ - self.api_request(method='DELETE', path='/%s' % (topic_path,)) - - def topic_publish(self, topic_path, messages): - """API call: publish one or more messages to a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - - :type messages: list of dict - :param messages: messages to be published. - - :rtype: list of string - :returns: list of opaque IDs for published messages. - """ - messages_to_send = copy.deepcopy(messages) - _transform_messages_base64(messages_to_send, _base64_unicode) - data = {'messages': messages_to_send} - response = self.api_request( - method='POST', path='/%s:publish' % (topic_path,), data=data) - return response['messageIds'] - - def topic_list_subscriptions(self, topic, page_size=None, page_token=None): - """API call: list subscriptions bound to a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: The topic that owns the subscriptions. - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: list of strings - :returns: fully-qualified names of subscriptions for the supplied - topic. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/%s/subscriptions' % (topic.full_name,) - - iterator = page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=_item_to_subscription_for_topic, - items_key='subscriptions', - page_token=page_token, - extra_params=extra_params) - iterator.topic = topic - return iterator - - -class _SubscriberAPI(object): - """Helper mapping subscriber-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_subscriptions(self, project, page_size=None, page_token=None): - """API call: list subscriptions for a given project - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. 
- - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/subscriptions' % (project,) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Subscription.from_api_repr, they - # can be re-used by other subscriptions from the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_sub_for_client, topics=topics) - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=item_to_value, - items_key='subscriptions', - page_token=page_token, - extra_params=extra_params) - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """API call: create a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic being - subscribed, in format - ``projects//topics/``. - - :type ack_deadline: int - :param ack_deadline: - (Optional) the deadline (in seconds) by which messages pulled from - the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - path = '/%s' % (subscription_path,) - resource = {'topic': topic_path} - - if ack_deadline is not None: - resource['ackDeadlineSeconds'] = ack_deadline - - if push_endpoint is not None: - resource['pushConfig'] = {'pushEndpoint': push_endpoint} - - if retain_acked_messages is not None: - resource['retainAckedMessages'] = retain_acked_messages - - if message_retention_duration is not None: - pb = _timedelta_to_duration_pb(message_retention_duration) - resource['messageRetentionDuration'] = { - 'seconds': pb.seconds, - 'nanos': pb.nanos - } - - return self.api_request(method='PUT', path=path, data=resource) - - def subscription_get(self, subscription_path): - """API call: retrieve a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. 
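The removed HTTP ``subscription_create`` above assembles its request body field by field; the sketch below shows the resource it would produce for a pull subscription with a three-day retention window (the project, topic, and subscription names are placeholders).

.. code-block:: python

    # Shape of the PUT body assembled by the removed subscription_create.
    import datetime

    from google.cloud._helpers import _timedelta_to_duration_pb

    retention = _timedelta_to_duration_pb(datetime.timedelta(days=3))
    resource = {
        'topic': 'projects/my-project/topics/my-topic',
        'ackDeadlineSeconds': 60,
        'retainAckedMessages': True,
        'messageRetentionDuration': {
            'seconds': retention.seconds,
            'nanos': retention.nanos,
        },
    }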
- - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - path = '/%s' % (subscription_path,) - return self.api_request(method='GET', path=path) - - def subscription_delete(self, subscription_path): - """API call: delete a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - """ - path = '/%s' % (subscription_path,) - self.api_request(method='DELETE', path=path) - - def subscription_modify_push_config(self, subscription_path, - push_endpoint): - """API call: update push config of a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - """ - path = '/%s:modifyPushConfig' % (subscription_path,) - resource = {'pushConfig': {'pushEndpoint': push_endpoint}} - self.api_request(method='POST', path=path, data=resource) - - def subscription_pull(self, subscription_path, return_immediately=False, - max_messages=1): - """API call: retrieve messages for a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :rtype: list of dict - :returns: the ``receivedMessages`` element of the response. - """ - path = '/%s:pull' % (subscription_path,) - data = { - 'returnImmediately': return_immediately, - 'maxMessages': max_messages, - } - response = self.api_request(method='POST', path=path, data=data) - messages = response.get('receivedMessages', ()) - _transform_messages_base64(messages, base64.b64decode, 'message') - return messages - - def subscription_acknowledge(self, subscription_path, ack_ids): - """API call: acknowledge retrieved messages - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - """ - path = '/%s:acknowledge' % (subscription_path,) - data = { - 'ackIds': ack_ids, - } - self.api_request(method='POST', path=path, data=data) - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - """API call: update ack deadline for retrieved messages - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - """ - path = '/%s:modifyAckDeadline' % (subscription_path,) - data = { - 'ackIds': ack_ids, - 'ackDeadlineSeconds': ack_deadline, - } - self.api_request(method='POST', path=path, data=data) - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - """API call: seek a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type subscription_path: str - :param subscription_path:: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type time: str - :param time: The time to seek to, in RFC 3339 format. - - :type snapshot: str - :param snapshot: The snapshot to seek to. - """ - path = '/%s:seek' % (subscription_path,) - data = {} - if time is not None: - data['time'] = time - if snapshot is not None: - data['snapshot'] = snapshot - self.api_request(method='POST', path=path, data=data) - - def list_snapshots(self, project, page_size=None, page_token=None): - """List snapshots for the project associated with this API. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/snapshots' % (project,) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Snapshot.from_api_repr, they - # can be re-used by other snapshots of the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_snapshot_for_client, topics=topics) - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=item_to_value, - items_key='snapshots', - page_token=page_token, - extra_params=extra_params) - - def snapshot_create(self, snapshot_path, subscription_path): - """API call: create a snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :type subscription_path: str - :param subscription_path: fully-qualified path of the subscrption that - the new snapshot captures, in format - ``projects//subscription/``. - - :rtype: dict - :returns: ``Snapshot`` resource returned from the API. - """ - path = '/%s' % (snapshot_path,) - data = {'subscription': subscription_path} - return self.api_request(method='PUT', path=path, data=data) - - def snapshot_delete(self, snapshot_path): - """API call: delete a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. 
- """ - path = '/%s' % (snapshot_path,) - self.api_request(method='DELETE', path=path) - - -class _IAMPolicyAPI(object): - """Helper mapping IAM policy-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self.api_request = client._connection.api_request - - def get_iam_policy(self, target_path): - """API call: fetch the IAM policy for the target - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - - :type target_path: str - :param target_path: the path of the target object. - - :rtype: dict - :returns: the resource returned by the ``getIamPolicy`` API request. - """ - path = '/%s:getIamPolicy' % (target_path,) - return self.api_request(method='GET', path=path) - - def set_iam_policy(self, target_path, policy): - """API call: update the IAM policy for the target - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - - :type target_path: str - :param target_path: the path of the target object. - - :type policy: dict - :param policy: the new policy resource. - - :rtype: dict - :returns: the resource returned by the ``setIamPolicy`` API request. - """ - wrapped = {'policy': policy} - path = '/%s:setIamPolicy' % (target_path,) - return self.api_request(method='POST', path=path, data=wrapped) - - def test_iam_permissions(self, target_path, permissions): - """API call: test permissions - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - - :type target_path: str - :param target_path: the path of the target object. - - :type permissions: list of string - :param permissions: the permissions to check - - :rtype: dict - :returns: the resource returned by the ``getIamPolicy`` API request. - """ - wrapped = {'permissions': permissions} - path = '/%s:testIamPermissions' % (target_path,) - resp = self.api_request(method='POST', path=path, data=wrapped) - return resp.get('permissions', []) - - -def _base64_unicode(value): - """Helper to base64 encode and make JSON serializable. - - :type value: str - :param value: String value to be base64 encoded and made serializable. - - :rtype: str - :returns: Base64 encoded string/unicode value. - """ - as_bytes = base64.b64encode(value) - return as_bytes.decode('ascii') - - -def _transform_messages_base64(messages, transform, key=None): - """Helper for base64 encoding and decoding messages. - - :type messages: list - :param messages: List of dictionaries with message data. - - :type transform: :class:`~types.FunctionType` - :param transform: Function to encode/decode the message data. - - :type key: str - :param key: Index to access messages. - """ - for message in messages: - if key is not None: - message = message[key] - if 'data' in message: - message['data'] = transform(message['data']) - - -def _item_to_topic(iterator, resource): - """Convert a JSON topic to the native object. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A topic returned from the API. 
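The ``_base64_unicode`` and ``_transform_messages_base64`` helpers above exist because the JSON API carries message payloads as base64 text; the round trip they perform is simply:

.. code-block:: python

    # Encoding for topic_publish, decoding for subscription_pull, as done by
    # the removed helpers above.
    import base64

    message = {'data': b'hello world', 'attributes': {'origin': 'example'}}

    # Outbound: bytes -> base64 -> ASCII str, so the body is JSON-serializable.
    message['data'] = base64.b64encode(message['data']).decode('ascii')

    # Inbound: reverse the transformation on pulled messages.
    message['data'] = base64.b64decode(message['data'])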
- - :rtype: :class:`~google.cloud.pubsub.topic.Topic` - :returns: The next topic in the page. - """ - return Topic.from_api_repr(resource, iterator.client) - - -def _item_to_subscription_for_topic(iterator, subscription_path): - """Convert a subscription name to the native object. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type subscription_path: str - :param subscription_path: Subscription path returned from the API. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - subscription_name = subscription_name_from_path( - subscription_path, iterator.client.project) - return Subscription(subscription_name, iterator.topic) - - -def _item_to_sub_for_client(iterator, resource, topics): - """Convert a subscription to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_subscriptions`. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - return Subscription.from_api_repr( - resource, iterator.client, topics=topics) - - -def _item_to_snapshot_for_client(iterator, resource, topics): - """Convert a subscription to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_snapshots`. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - return Snapshot.from_api_repr( - resource, iterator.client, topics=topics) diff --git a/pubsub/google/cloud/pubsub/client.py b/pubsub/google/cloud/pubsub/client.py deleted file mode 100644 index 0dc9b8fb6f38..000000000000 --- a/pubsub/google/cloud/pubsub/client.py +++ /dev/null @@ -1,285 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Client for interacting with the Google Cloud Pub/Sub API.""" - -import os - -from google.cloud.client import ClientWithProject -from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.pubsub._http import Connection -from google.cloud.pubsub._http import _PublisherAPI as JSONPublisherAPI -from google.cloud.pubsub._http import _SubscriberAPI as JSONSubscriberAPI -from google.cloud.pubsub._http import _IAMPolicyAPI -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -try: - from google.cloud.pubsub._gax import _PublisherAPI as GAXPublisherAPI - from google.cloud.pubsub._gax import _SubscriberAPI as GAXSubscriberAPI - from google.cloud.pubsub._gax import make_gax_publisher_api - from google.cloud.pubsub._gax import make_gax_subscriber_api -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - GAXPublisherAPI = None - GAXSubscriberAPI = None - make_gax_publisher_api = None - make_gax_subscriber_api = None -else: - _HAVE_GRPC = True - - -_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) -_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC - - -class Client(ClientWithProject): - """Client to bundle configuration needed for API requests. - - :type project: str - :param project: the project which the client acts on behalf of. Will be - passed when creating a topic. If not passed, - falls back to the default inferred from the environment. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``_http`` object is - passed), falls back to the default inferred from the - environment. - - :type _http: :class:`~requests.Session` - :param _http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`requests.Session.request`. If not passed, an - ``_http`` object is created that is bound to the - ``credentials`` for the current object. - This parameter should be considered private, and could - change in the future. - - :type _use_grpc: bool - :param _use_grpc: (Optional) Explicitly specifies whether - to use the gRPC transport (via GAX) or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` - environment variable. - This parameter should be considered private, and could - change in the future. 
- """ - - _publisher_api = None - _subscriber_api = None - _iam_policy_api = None - - SCOPE = ('https://www.googleapis.com/auth/pubsub', - 'https://www.googleapis.com/auth/cloud-platform') - """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" - - def __init__(self, project=None, credentials=None, - _http=None, _use_grpc=None): - super(Client, self).__init__( - project=project, credentials=credentials, _http=_http) - self._connection = Connection(self) - if _use_grpc is None: - self._use_grpc = _USE_GRPC - else: - self._use_grpc = _use_grpc - - @property - def publisher_api(self): - """Helper for publisher-related API calls.""" - if self._publisher_api is None: - if self._use_grpc: - if self._connection.in_emulator: - generated = make_gax_publisher_api( - host=self._connection.host) - else: - generated = make_gax_publisher_api( - credentials=self._credentials) - self._publisher_api = GAXPublisherAPI(generated, self) - else: - self._publisher_api = JSONPublisherAPI(self) - return self._publisher_api - - @property - def subscriber_api(self): - """Helper for subscriber-related API calls.""" - if self._subscriber_api is None: - if self._use_grpc: - if self._connection.in_emulator: - generated = make_gax_subscriber_api( - host=self._connection.host) - else: - generated = make_gax_subscriber_api( - credentials=self._credentials) - self._subscriber_api = GAXSubscriberAPI(generated, self) - else: - self._subscriber_api = JSONSubscriberAPI(self) - return self._subscriber_api - - @property - def iam_policy_api(self): - """Helper for IAM policy-related API calls.""" - if self._iam_policy_api is None: - self._iam_policy_api = _IAMPolicyAPI(self) - return self._iam_policy_api - - def list_topics(self, page_size=None, page_token=None): - """List topics for the project associated with this client. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_list_topics] - :end-before: [END client_list_topics] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current API. - """ - api = self.publisher_api - return api.list_topics( - self.project, page_size, page_token) - - def list_subscriptions(self, page_size=None, page_token=None): - """List subscriptions for the project associated with this client. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_list_subscriptions] - :end-before: [END client_list_subscriptions] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current client. 
- """ - api = self.subscriber_api - return api.list_subscriptions( - self.project, page_size, page_token) - - def list_snapshots(self, page_size=None, page_token=None): - """List snapshots for the project associated with this API. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - api = self.subscriber_api - return api.list_snapshots( - self.project, page_size, page_token) - - def topic(self, name, timestamp_messages=False): - """Creates a topic bound to the current client. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_topic] - :end-before: [END client_topic] - :dedent: 4 - - :type name: str - :param name: the name of the topic to be constructed. - - :type timestamp_messages: bool - :param timestamp_messages: To be passed to ``Topic`` constructor. - - :rtype: :class:`google.cloud.pubsub.topic.Topic` - :returns: Topic created with the current client. - """ - return Topic(name, client=self, timestamp_messages=timestamp_messages) - - def subscription(self, name, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """Creates a subscription bound to the current client. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_subscription] - :end-before: [END client_subscription] - :dedent: 4 - - :type name: str - :param name: the name of the subscription to be constructed. - - :type ack_deadline: int - :param ack_deadline: (Optional) The deadline (in seconds) by which - messages pulledfrom the back-end must be - acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by ``message_retention_duration``. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by ``message_retention_duration``. If unset, defaults to 7 days. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: Subscription created with the current client. - """ - return Subscription( - name, ack_deadline=ack_deadline, push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, client=self) diff --git a/pubsub/google/cloud/pubsub/iam.py b/pubsub/google/cloud/pubsub/iam.py deleted file mode 100644 index 7dce1c2c4cfa..000000000000 --- a/pubsub/google/cloud/pubsub/iam.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""PubSub API IAM policy definitions - -For allowed roles / permissions, see: -https://cloud.google.com/pubsub/access_control#permissions -""" - -import warnings - -# pylint: disable=unused-import -from google.cloud.iam import OWNER_ROLE # noqa - backward compat -from google.cloud.iam import EDITOR_ROLE # noqa - backward compat -from google.cloud.iam import VIEWER_ROLE # noqa - backward compat -# pylint: enable=unused-import -from google.cloud.iam import Policy as _BasePolicy -from google.cloud.iam import _ASSIGNMENT_DEPRECATED_MSG - -# Pubsub-specific IAM roles - -PUBSUB_ADMIN_ROLE = 'roles/pubsub.admin' -"""Role implying all rights to an object.""" - -PUBSUB_EDITOR_ROLE = 'roles/pubsub.editor' -"""Role implying rights to modify an object.""" - -PUBSUB_VIEWER_ROLE = 'roles/pubsub.viewer' -"""Role implying rights to access an object.""" - -PUBSUB_PUBLISHER_ROLE = 'roles/pubsub.publisher' -"""Role implying rights to publish to a topic.""" - -PUBSUB_SUBSCRIBER_ROLE = 'roles/pubsub.subscriber' -"""Role implying rights to subscribe to a topic.""" - - -# Pubsub-specific permissions - -PUBSUB_TOPICS_CONSUME = 'pubsub.topics.consume' -"""Permission: consume events from a subscription.""" - -PUBSUB_TOPICS_CREATE = 'pubsub.topics.create' -"""Permission: create topics.""" - -PUBSUB_TOPICS_DELETE = 'pubsub.topics.delete' -"""Permission: delete topics.""" - -PUBSUB_TOPICS_GET = 'pubsub.topics.get' -"""Permission: retrieve topics.""" - -PUBSUB_TOPICS_GET_IAM_POLICY = 'pubsub.topics.getIamPolicy' -"""Permission: retrieve subscription IAM policies.""" - -PUBSUB_TOPICS_LIST = 'pubsub.topics.list' -"""Permission: list topics.""" - -PUBSUB_TOPICS_SET_IAM_POLICY = 'pubsub.topics.setIamPolicy' -"""Permission: update subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_CONSUME = 'pubsub.subscriptions.consume' -"""Permission: consume events from a subscription.""" - -PUBSUB_SUBSCRIPTIONS_CREATE = 'pubsub.subscriptions.create' -"""Permission: create subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_DELETE = 'pubsub.subscriptions.delete' -"""Permission: delete subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_GET = 'pubsub.subscriptions.get' -"""Permission: retrieve subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY = 'pubsub.subscriptions.getIamPolicy' -"""Permission: retrieve subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_LIST = 'pubsub.subscriptions.list' -"""Permission: list subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_SET_IAM_POLICY = 'pubsub.subscriptions.setIamPolicy' -"""Permission: update subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_UPDATE = 'pubsub.subscriptions.update' -"""Permission: update subscriptions.""" - - -class Policy(_BasePolicy): - """IAM Policy / Bindings. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy - https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Binding - """ - _OWNER_ROLES = (OWNER_ROLE, PUBSUB_ADMIN_ROLE) - """Roles mapped onto our ``owners`` attribute.""" - - _EDITOR_ROLES = (EDITOR_ROLE, PUBSUB_EDITOR_ROLE) - """Roles mapped onto our ``editors`` attribute.""" - - _VIEWER_ROLES = (VIEWER_ROLE, PUBSUB_VIEWER_ROLE) - """Roles mapped onto our ``viewers`` attribute.""" - - @property - def publishers(self): - """Legacy access to owner role.""" - return frozenset(self._bindings.get(PUBSUB_PUBLISHER_ROLE, ())) - - @publishers.setter - def publishers(self, value): - """Update publishers.""" - warnings.warn( - _ASSIGNMENT_DEPRECATED_MSG.format( - 'publishers', PUBSUB_PUBLISHER_ROLE), - DeprecationWarning) - self[PUBSUB_PUBLISHER_ROLE] = value - - @property - def subscribers(self): - """Legacy access to owner role.""" - return frozenset(self._bindings.get(PUBSUB_SUBSCRIBER_ROLE, ())) - - @subscribers.setter - def subscribers(self, value): - """Update subscribers.""" - warnings.warn( - _ASSIGNMENT_DEPRECATED_MSG.format( - 'subscribers', PUBSUB_SUBSCRIBER_ROLE), - DeprecationWarning) - self[PUBSUB_SUBSCRIBER_ROLE] = value diff --git a/pubsub/google/cloud/pubsub/message.py b/pubsub/google/cloud/pubsub/message.py deleted file mode 100644 index e2153d5cb14f..000000000000 --- a/pubsub/google/cloud/pubsub/message.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Topics.""" - -from google.cloud._helpers import _rfc3339_to_datetime - - -class Message(object): - """Messages can be published to a topic and received by subscribers. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage - - :type data: bytes - :param data: the payload of the message. - - :type message_id: str - :param message_id: An ID assigned to the message by the API. - - :type attributes: dict - :param attributes: - (Optional) Extra metadata associated by the publisher with the message. - """ - _service_timestamp = None - - def __init__(self, data, message_id, attributes=None): - self.data = data - self.message_id = message_id - self._attributes = attributes - - @property - def attributes(self): - """Lazily-constructed attribute dictionary.""" - if self._attributes is None: - self._attributes = {} - return self._attributes - - @property - def timestamp(self): - """Return sortable timestamp from attributes, if passed. - - Allows sorting messages in publication order (assuming consistent - clocks across all publishers). - - :rtype: :class:`datetime.datetime` - :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp - :raises: ValueError if timestamp not in ``attributes``, or if it does - not match the RFC 3339 format. 
- """ - stamp = self.attributes.get('timestamp') - if stamp is None: - raise ValueError('No timestamp') - return _rfc3339_to_datetime(stamp) - - @property - def service_timestamp(self): - """Return server-set timestamp. - - :rtype: str - :returns: timestamp (in UTC timezone) in RFC 3339 format - """ - return self._service_timestamp - - @classmethod - def from_api_repr(cls, api_repr): - """Factory: construct message from API representation. - - :type api_repr: dict - :param api_repr: (Optional) The API representation of the message - - :rtype: :class:`Message` - :returns: The message created from the response. - """ - data = api_repr.get('data', b'') - instance = cls( - data=data, message_id=api_repr['messageId'], - attributes=api_repr.get('attributes')) - instance._service_timestamp = api_repr.get('publishTime') - return instance diff --git a/pubsub/google/cloud/pubsub/snapshot.py b/pubsub/google/cloud/pubsub/snapshot.py deleted file mode 100644 index 599cd05d8765..000000000000 --- a/pubsub/google/cloud/pubsub/snapshot.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Snapshots.""" - -from google.cloud.pubsub._helpers import topic_name_from_path - - -class Snapshot(object): - - _DELETED_TOPIC_PATH = '_deleted-topic_' - """Value of ``projects.snapshots.topic`` when topic has been deleted.""" - - def __init__(self, name, subscription=None, topic=None, client=None): - - num_kwargs = len( - [param for param in (subscription, topic, client) if param]) - if num_kwargs != 1: - raise TypeError( - "Pass only one of 'subscription', 'topic', 'client'.") - - self.name = name - self.topic = topic or getattr(subscription, 'topic', None) - self._subscription = subscription - self._client = client or getattr( - subscription, '_client', None) or topic._client - - @classmethod - def from_api_repr(cls, resource, client, topics=None): - """Factory: construct a snapshot given its API representation - - :type resource: dict - :param resource: snapshot resource representation returned from the - API. - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration. - - :type topics: dict - :param topics: - (Optional) A mapping of topic names -> topics. If not passed, - the snapshot will have a newly-created topic. - - :rtype: :class:`google.cloud.pubsub.snapshot.Snapshot` - :returns: Snapshot parsed from ``resource``. - """ - if topics is None: - topics = {} - topic_path = resource['topic'] - if topic_path == cls._DELETED_TOPIC_PATH: - topic = None - else: - topic = topics.get(topic_path) - if topic is None: - # NOTE: This duplicates behavior from Topic.from_api_repr to - # avoid an import cycle.
- topic_name = topic_name_from_path(topic_path, client.project) - topic = topics[topic_path] = client.topic(topic_name) - _, _, _, name = resource['name'].split('/') - if topic is None: - return cls(name, client=client) - return cls(name, topic=topic) - - @property - def project(self): - """Project bound to the subscription.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in subscription APIs""" - return 'projects/%s/snapshots/%s' % (self.project, self.name) - - @property - def path(self): - """URL path for the subscription's APIs""" - return '/%s' % (self.full_name,) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic of the - current subscription. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - if not self._subscription: - raise RuntimeError( - 'Cannot create a snapshot not bound to a subscription') - - client = self._require_client(client) - api = client.subscriber_api - api.snapshot_create(self.full_name, self._subscription.full_name) - - def delete(self, client=None): - """API call: delete the snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.snapshot_delete(self.full_name) diff --git a/pubsub/google/cloud/pubsub/subscription.py b/pubsub/google/cloud/pubsub/subscription.py deleted file mode 100644 index 86ca1f97c230..000000000000 --- a/pubsub/google/cloud/pubsub/subscription.py +++ /dev/null @@ -1,590 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Subscriptions.""" - -import datetime - -from google.cloud.exceptions import NotFound -from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub._helpers import topic_name_from_path -from google.cloud.pubsub.iam import Policy -from google.cloud.pubsub.message import Message - - -class Subscription(object): - """Subscriptions receive messages published to their topics. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions - - :type name: str - :param name: the name of the subscription. - - :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: - (Optional) the topic to which the subscription belongs; if ``None``, - the subscription's topic has been deleted. - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. If - not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: - (Optional) The client to use. If not passed, falls back to the - ``client`` stored on the topic. - """ - - _DELETED_TOPIC_PATH = '_deleted-topic_' - """Value of ``projects.subscriptions.topic`` when topic has been deleted. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions#Subscription.FIELDS.topic - """ - - def __init__(self, name, topic=None, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, message_retention_duration=None, - client=None): - - if client is None and topic is None: - raise TypeError("Pass only one of 'topic' or 'client'.") - - if client is not None and topic is not None: - raise TypeError("Pass only one of 'topic' or 'client'.") - - self.name = name - self.topic = topic - self._client = client or topic._client - self.ack_deadline = ack_deadline - self.push_endpoint = push_endpoint - self.retain_acked_messages = retain_acked_messages - self.message_retention_duration = message_retention_duration - - @classmethod - def from_api_repr(cls, resource, client, topics=None): - """Factory: construct a topic given its API representation - - :type resource: dict - :param resource: topic resource representation returned from the API. - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration for a topic. - - :type topics: dict - :param topics: - (Optional) A mapping of topic names -> topics. If not passed, the - subscription will have a newly-created topic. - - :rtype: :class:`google.cloud.pubsub.subscription.Subscription` - :returns: Subscription parsed from ``resource``. - """ - if topics is None: - topics = {} - topic_path = resource['topic'] - if topic_path == cls._DELETED_TOPIC_PATH: - topic = None - else: - topic = topics.get(topic_path) - if topic is None: - # NOTE: This duplicates behavior from Topic.from_api_repr to - # avoid an import cycle. 
- topic_name = topic_name_from_path(topic_path, client.project) - topic = topics[topic_path] = client.topic(topic_name) - _, _, _, name = resource['name'].split('/') - ack_deadline = resource.get('ackDeadlineSeconds') - push_config = resource.get('pushConfig', {}) - push_endpoint = push_config.get('pushEndpoint') - retain_acked_messages = resource.get('retainAckedMessages') - resource_duration = resource.get('duration', {}) - message_retention_duration = datetime.timedelta( - seconds=resource_duration.get('seconds', 0), - microseconds=resource_duration.get('nanos', 0) / 1000) - if topic is None: - return cls(name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, - client=client) - return cls(name, topic=topic, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - - @property - def project(self): - """Project bound to the subscription.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in subscription APIs""" - return 'projects/%s/subscriptions/%s' % (self.project, self.name) - - @property - def path(self): - """URL path for the subscription's APIs""" - return '/%s' % (self.full_name,) - - def auto_ack(self, return_immediately=False, max_messages=1, client=None): - """:class:`AutoAck` factory - - :type return_immediately: bool - :param return_immediately: passed through to :meth:`Subscription.pull` - - :type max_messages: int - :param max_messages: passed through to :meth:`Subscription.pull` - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: passed through to :meth:`Subscription.pull` and - :meth:`Subscription.acknowledge`. - - :rtype: :class:`AutoAck` - :returns: the instance created for the given ``ack_id`` and ``message`` - """ - return AutoAck(self, return_immediately, max_messages, client) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic of the - current subscription. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the subscription via a PUT request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_create] - :end-before: [END subscription_create] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. 
- """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_create( - self.full_name, self.topic.full_name, - ack_deadline=self.ack_deadline, push_endpoint=self.push_endpoint, - retain_acked_messages=self.retain_acked_messages, - message_retention_duration=self.message_retention_duration) - - def exists(self, client=None): - """API call: test existence of the subscription via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_exists] - :end-before: [END subscription_exists] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: bool - :returns: Boolean indicating existence of the subscription. - """ - client = self._require_client(client) - api = client.subscriber_api - try: - api.subscription_get(self.full_name) - except NotFound: - return False - else: - return True - - def reload(self, client=None): - """API call: sync local subscription configuration via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :attr:`ack_deadline` and :attr:`push_endpoint` might never have - been set locally, or might have been updated by another client. This - method fetches their values from the server. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_reload] - :end-before: [END subscription_reload] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - data = api.subscription_get(self.full_name) - self.ack_deadline = data.get('ackDeadlineSeconds') - push_config = data.get('pushConfig', {}) - self.push_endpoint = push_config.get('pushEndpoint') - if self.topic is None and 'topic' in data: - topic_name = topic_name_from_path(data['topic'], client.project) - self.topic = client.topic(topic_name) - - def delete(self, client=None): - """API call: delete the subscription via a DELETE request. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_delete] - :end-before: [END subscription_delete] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_delete(self.full_name) - - def modify_push_configuration(self, push_endpoint, client=None): - """API call: update the push endpoint for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_push_pull] - :end-before: [END subscription_push_pull] - - .. literalinclude:: snippets.py - :start-after: [START subscription_pull_push] - :end-before: [END subscription_pull_push] - - :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the - back-end. 
If None, the application must pull - messages. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_modify_push_config(self.full_name, push_endpoint) - self.push_endpoint = push_endpoint - - def pull(self, return_immediately=False, max_messages=1, client=None): - """API call: retrieve messages for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_pull] - :end-before: [END subscription_pull] - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: list of (ack_id, message) tuples - :returns: sequence of tuples: ``ack_id`` is the ID to be used in a - subsequent call to :meth:`acknowledge`, and ``message`` - is an instance of - :class:`~google.cloud.pubsub.message.Message`. - """ - client = self._require_client(client) - api = client.subscriber_api - response = api.subscription_pull( - self.full_name, return_immediately, max_messages) - return [(info['ackId'], Message.from_api_repr(info['message'])) - for info in response] - - def acknowledge(self, ack_ids, client=None): - """API call: acknowledge retrieved messages for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/acknowledge - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_acknowledge] - :end-before: [END subscription_acknowledge] - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_acknowledge(self.full_name, ack_ids) - - def modify_ack_deadline(self, ack_ids, ack_deadline, client=None): - """API call: update acknowledgement deadline for a retrieved message. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being updated - - :type ack_deadline: int - :param ack_deadline: new deadline for the message, in seconds - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_modify_ack_deadline( - self.full_name, ack_ids, ack_deadline) - - def snapshot(self, name, client=None): - """Creates a snapshot of this subscription. 
- - :type name: str - :param name: the name of the subscription - - :rtype: :class:`Snapshot` - :returns: The snapshot created with the passed in arguments. - """ - return Snapshot(name, subscription=self) - - def seek_snapshot(self, snapshot, client=None): - """API call: seek a subscription to a given snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type snapshot: :class:`Snapshot` - :param snapshot: The snapshot to seek to. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_seek(self.full_name, snapshot=snapshot.full_name) - - def seek_timestamp(self, timestamp, client=None): - """API call: seek a subscription to a given point in time - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type time: :class:`datetime.datetime` - :param time: The time to seek to. - """ - client = self._require_client(client) - timestamp = _datetime_to_rfc3339(timestamp) - api = client.subscriber_api - api.subscription_seek(self.full_name, time=timestamp) - - def get_iam_policy(self, client=None): - """Fetch the IAM policy for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_get_iam_policy] - :end-before: [END subscription_get_iam_policy] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: policy created from the resource returned by the - ``getIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resp = api.get_iam_policy(self.full_name) - return Policy.from_api_repr(resp) - - def set_iam_policy(self, policy, client=None): - """Update the IAM policy for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_set_iam_policy] - :end-before: [END subscription_set_iam_policy] - - :type policy: :class:`google.cloud.pubsub.iam.Policy` - :param policy: the new policy, typically fetched via - :meth:`get_iam_policy` and updated in place. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: updated policy created from the resource returned by the - ``setIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resource = policy.to_api_repr() - resp = api.set_iam_policy(self.full_name, resource) - return Policy.from_api_repr(resp) - - def check_iam_permissions(self, permissions, client=None): - """Verify permissions allowed for the current user. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - - Example: - - .. 
literalinclude:: snippets.py - :start-after: [START subscription_check_iam_permissions] - :end-before: [END subscription_check_iam_permissions] - - :type permissions: list of string - :param permissions: list of permissions to be tested - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: sequence of string - :returns: subset of ``permissions`` allowed by current IAM policy. - """ - client = self._require_client(client) - api = client.iam_policy_api - return api.test_iam_permissions( - self.full_name, list(permissions)) - - -class AutoAck(dict): - """Wrapper for :meth:`Subscription.pull` results. - - Mapping, tracks messages still-to-be-acknowledged. - - When used as a context manager, acknowledges all messages still in the - mapping on `__exit__`. When processing the pulled messages, application - code MUST delete messages from the :class:`AutoAck` mapping which are not - successfully processed, e.g.: - - .. code-block: python - - with AutoAck(subscription) as ack: # calls ``subscription.pull`` - for ack_id, message in ack.items(): - try: - do_something_with(message): - except: - del ack[ack_id] - - :type subscription: :class:`Subscription` - :param subscription: subscription to be pulled. - - :type return_immediately: bool - :param return_immediately: passed through to :meth:`Subscription.pull` - - :type max_messages: int - :param max_messages: passed through to :meth:`Subscription.pull` - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: passed through to :meth:`Subscription.pull` and - :meth:`Subscription.acknowledge`. - """ - def __init__(self, subscription, - return_immediately=False, max_messages=1, client=None): - super(AutoAck, self).__init__() - self._subscription = subscription - self._return_immediately = return_immediately - self._max_messages = max_messages - self._client = client - - def __enter__(self): - items = self._subscription.pull( - self._return_immediately, self._max_messages, self._client) - self.update(items) - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if self: - self._subscription.acknowledge(list(self), self._client) diff --git a/pubsub/google/cloud/pubsub/topic.py b/pubsub/google/cloud/pubsub/topic.py deleted file mode 100644 index 92f453bd2b2b..000000000000 --- a/pubsub/google/cloud/pubsub/topic.py +++ /dev/null @@ -1,551 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Define API Topics.""" - -import base64 -import json -import time - -from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud._helpers import _NOW -from google.cloud._helpers import _to_bytes -from google.cloud.exceptions import NotFound -from google.cloud.pubsub._helpers import topic_name_from_path -from google.cloud.pubsub.iam import Policy -from google.cloud.pubsub.subscription import Subscription - - -class Topic(object): - """Topics are targets to which messages can be published. - - Subscribers then receive those messages. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics - - :type name: str - :param name: the name of the topic - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: A client which holds credentials and project configuration - for the topic (which requires a project). - - :type timestamp_messages: bool - :param timestamp_messages: If true, the topic will add a ``timestamp`` key - to the attributes of each published message: - the value will be an RFC 3339 timestamp. - """ - def __init__(self, name, client, timestamp_messages=False): - self.name = name - self._client = client - self.timestamp_messages = timestamp_messages - - def subscription(self, name, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """Creates a subscription bound to the current topic. - - Example: pull-mode subcription, default parameter values - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_defaults] - :end-before: [END topic_subscription_defaults] - - Example: pull-mode subcription, override ``ack_deadline`` default - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_ack90] - :end-before: [END topic_subscription_ack90] - - Example: push-mode subcription - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_push] - :end-before: [END topic_subscription_push] - - :type name: str - :param name: the name of the subscription - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the - back-end. If not set, the application must pull - messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: :class:`Subscription` - :returns: The subscription created with the passed in arguments. 
- """ - return Subscription( - name, self, ack_deadline=ack_deadline, push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - - @classmethod - def from_api_repr(cls, resource, client): - """Factory: construct a topic given its API representation - - :type resource: dict - :param resource: topic resource representation returned from the API - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration for the topic. - - :rtype: :class:`google.cloud.pubsub.topic.Topic` - :returns: Topic parsed from ``resource``. - :raises: :class:`ValueError` if ``client`` is not ``None`` and the - project from the resource does not agree with the project - from the client. - """ - topic_name = topic_name_from_path(resource['name'], client.project) - return cls(topic_name, client=client) - - @property - def project(self): - """Project bound to the topic.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in topic / subscription APIs""" - return 'projects/%s/topics/%s' % (self.project, self.name) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the topic via a PUT request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_create] - :end-before: [END topic_create] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - api.topic_create(topic_path=self.full_name) - - def exists(self, client=None): - """API call: test for the existence of the topic via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_exists] - :end-before: [END topic_exists] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: bool - :returns: Boolean indicating existence of the topic. - """ - client = self._require_client(client) - api = client.publisher_api - - try: - api.topic_get(topic_path=self.full_name) - except NotFound: - return False - else: - return True - - def delete(self, client=None): - """API call: delete the topic via a DELETE request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_delete] - :end-before: [END topic_delete] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. 
- """ - client = self._require_client(client) - api = client.publisher_api - api.topic_delete(topic_path=self.full_name) - - def _timestamp_message(self, attrs): - """Add a timestamp to ``attrs``, if the topic is so configured. - - If ``attrs`` already has the key, do nothing. - - Helper method for ``publish``/``Batch.publish``. - """ - if self.timestamp_messages and 'timestamp' not in attrs: - attrs['timestamp'] = _datetime_to_rfc3339(_NOW()) - - def publish(self, message, client=None, **attrs): - """API call: publish a message to a topic via a POST request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - Example without message attributes: - - .. literalinclude:: snippets.py - :start-after: [START topic_publish_simple_message] - :end-before: [END topic_publish_simple_message] - - With message attributes: - - .. literalinclude:: snippets.py - :start-after: [START topic_publish_message_with_attrs] - :end-before: [END topic_publish_message_with_attrs] - - :type message: bytes - :param message: the message payload - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :type attrs: dict (string -> string) - :param attrs: key-value pairs to send as message attributes - - :rtype: str - :returns: message ID assigned by the server to the published message - """ - client = self._require_client(client) - api = client.publisher_api - - self._timestamp_message(attrs) - message_data = {'data': message, 'attributes': attrs} - message_ids = api.topic_publish(self.full_name, [message_data]) - return message_ids[0] - - def batch(self, client=None, **kwargs): - """Return a batch to use as a context manager. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_batch] - :end-before: [END topic_batch] - - .. note:: - - The only API request happens during the ``__exit__()`` of the topic - used as a context manager, and only if the block exits without - raising an exception. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :type kwargs: dict - :param kwargs: Keyword arguments passed to the - :class:`~google.cloud.pubsub.topic.Batch` constructor. - - :rtype: :class:`Batch` - :returns: A batch to use as a context manager. - """ - client = self._require_client(client) - return Batch(self, client, **kwargs) - - def list_subscriptions(self, page_size=None, page_token=None, client=None): - """List subscriptions for the project associated with this client. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_list_subscriptions] - :end-before: [END topic_list_subscriptions] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. 
- - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - return api.topic_list_subscriptions(self, page_size, page_token) - - def get_iam_policy(self, client=None): - """Fetch the IAM policy for the topic. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_get_iam_policy] - :end-before: [END topic_get_iam_policy] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: policy created from the resource returned by the - ``getIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resp = api.get_iam_policy(self.full_name) - return Policy.from_api_repr(resp) - - def set_iam_policy(self, policy, client=None): - """Update the IAM policy for the topic. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_set_iam_policy] - :end-before: [END topic_set_iam_policy] - - :type policy: :class:`google.cloud.pubsub.iam.Policy` - :param policy: the new policy, typically fetched via - :meth:`get_iam_policy` and updated in place. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: updated policy created from the resource returned by the - ``setIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resource = policy.to_api_repr() - resp = api.set_iam_policy(self.full_name, resource) - return Policy.from_api_repr(resp) - - def check_iam_permissions(self, permissions, client=None): - """Verify permissions allowed for the current user. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_check_iam_permissions] - :end-before: [END topic_check_iam_permissions] - - :type permissions: list of string - :param permissions: list of permissions to be tested - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: sequence of string - :returns: subset of ``permissions`` allowed by current IAM policy. - """ - client = self._require_client(client) - api = client.iam_policy_api - return api.test_iam_permissions( - self.full_name, list(permissions)) - - -class Batch(object): - """Context manager: collect messages to publish via a single API call. - - Helper returned by :meth:Topic.batch - - :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: the topic being published - - :param client: The client to use. - :type client: :class:`google.cloud.pubsub.client.Client` - - :param max_interval: The maximum interval, in seconds, before the batch - will automatically commit. 
Note that this does not - run a background loop; it just checks when each - message is published. Therefore, this is intended - for situations where messages are published at - reasonably regular intervals. Defaults to infinity - (off). - :type max_interval: float - - :param max_messages: The maximum number of messages to hold in the batch - before automatically commiting. Defaults to infinity - (off). - :type max_messages: float - - :param max_size: The maximum size that the serialized messages can be - before automatically commiting. Defaults to 9 MB - (slightly less than the API limit). - :type max_size: int - """ - _INFINITY = float('inf') - - def __init__(self, topic, client, max_interval=_INFINITY, - max_messages=_INFINITY, max_size=1024 * 1024 * 9): - self.topic = topic - self.client = client - self.messages = [] - self.message_ids = [] - - # Set the autocommit rules. If the interval or number of messages - # is exceeded, then the .publish() method will imply a commit. - self._max_interval = max_interval - self._max_messages = max_messages - self._max_size = max_size - - # Set up the initial state, initializing messages, the starting - # timestamp, etc. - self._reset_state() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.commit() - - def __iter__(self): - return iter(self.message_ids) - - def _reset_state(self): - """Reset the state of this batch.""" - - del self.messages[:] - self._start_timestamp = time.time() - self._current_size = 0 - - def publish(self, message, **attrs): - """Emulate publishing a message, but save it. - - :type message: bytes - :param message: the message payload - - :type attrs: dict (string -> string) - :param attrs: key-value pairs to send as message attributes - """ - self.topic._timestamp_message(attrs) - - # Append the message to the list of messages.. - item = {'attributes': attrs, 'data': message} - self.messages.append(item) - - # Determine the approximate size of the message, and increment - # the current batch size appropriately. - encoded = base64.b64encode(_to_bytes(message)) - encoded += base64.b64encode( - json.dumps(attrs, ensure_ascii=False).encode('utf8'), - ) - self._current_size += len(encoded) - - # If too much time has elapsed since the first message - # was added, autocommit. - now = time.time() - if now - self._start_timestamp > self._max_interval: - self.commit() - return - - # If the number of messages on the list is greater than the - # maximum allowed, autocommit (with the batch's client). - if len(self.messages) >= self._max_messages: - self.commit() - return - - # If we have reached the max size, autocommit. - if self._current_size >= self._max_size: - self.commit() - return - - def commit(self, client=None): - """Send saved messages as a single API call. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - """ - if not self.messages: - return - - if client is None: - client = self.client - api = client.publisher_api - message_ids = api.topic_publish(self.topic.full_name, self.messages[:]) - self.message_ids.extend(message_ids) - self._reset_state() diff --git a/pubsub/google/cloud/pubsub_v1/__init__.py b/pubsub/google/cloud/pubsub_v1/__init__.py new file mode 100644 index 000000000000..21706f6eee5e --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/__init__.py @@ -0,0 +1,25 @@ +# Copyright 2017, Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import Client as PublisherClient +from google.cloud.pubsub_v1.subscriber import Client as SubscriberClient + +__all__ = ( + 'PublisherClient', + 'SubscriberClient', + 'types', +) diff --git a/pubsub/google/cloud/pubsub_v1/_gapic.py b/pubsub/google/cloud/pubsub_v1/_gapic.py new file mode 100644 index 000000000000..79aac7de8941 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/_gapic.py @@ -0,0 +1,73 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import functools + + +def add_methods(source_class, blacklist=()): + """Add wrapped versions of the `api` member's methods to the class. + + Any methods passed in `blacklist` are not added. + Additionally, any methods explicitly defined on the wrapped class are + not added. + """ + def wrap(wrapped_fx): + """Wrap a GAPIC method; preserve its name and docstring.""" + # If this is a static or class method, then we need to *not* + # send self as the first argument. + # + # Similarly, for instance methods, we need to send self.api rather + # than self, since that is where the actual methods were declared. + instance_method = True + self = getattr(wrapped_fx, '__self__', None) + if issubclass(type(self), type): + instance_method = False + + # Okay, we have figured out what kind of method this is; send + # down the correct wrapper function. + if instance_method: + fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) + return functools.wraps(wrapped_fx)(fx) + fx = lambda self, *a, **kw: wrapped_fx(*a, **kw) + return functools.wraps(wrapped_fx)(fx) + + def actual_decorator(cls): + # Reflectively iterate over most of the methods on the source class + # (the GAPIC) and make wrapped versions available on this client. + for name in dir(source_class): + # Ignore all private and magic methods. + if name.startswith('_'): + continue + + # Ignore anything on our blacklist. + if name in blacklist: + continue + + # Retrieve the attribute, and ignore it if it is not callable. + attr = getattr(source_class, name) + if not callable(attr): + continue + + # Add a wrapper method to this object. + fx = wrap(getattr(source_class, name)) + setattr(cls, name, fx) + + # Return the augmented class. 
+ return cls + + # Simply return the actual decorator; this is returned from this method + # and actually used to decorate the class. + return actual_decorator diff --git a/pubsub/google/cloud/pubsub_v1/publisher/__init__.py b/pubsub/google/cloud/pubsub_v1/publisher/__init__.py new file mode 100644 index 000000000000..76d54649448f --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1.publisher.client import Client + + +__all__ = ( + 'Client', +) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py new file mode 100644 index 000000000000..61eea2bb9ad5 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -0,0 +1,147 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import abc +import enum + +import six + + +@six.add_metaclass(abc.ABCMeta) +class Batch(object): + """The base batching class for Pub/Sub publishing. + + Although the :class:`~.pubsub_v1.publisher.batch.thread.Batch` class, based + on :class:`threading.Thread`, is fine for most cases, advanced + users may need to implement something based on a different concurrency + model. + + This class defines the interface for the Batch implementation; + subclasses may be passed as the ``batch_class`` argument to + :class:`~.pubsub_v1.client.PublisherClient`. + + The batching behavior works like this: When the + :class:`~.pubsub_v1.publisher.client.Client` is asked to publish a new + message, it requires a batch. The client will see if there is an + already-opened batch for the given topic; if there is, then the message + is sent to that batch. If there is not, then a new batch is created + and the message put there. + + When a new batch is created, it automatically starts a timer counting + down to the maximum latency before the batch should commit. + Essentially, if enough time passes, the batch automatically commits + regardless of how much is in it. However, if either the message count or + size thresholds are encountered first, then the batch will commit early. 
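The latency, count, and size thresholds described above come from the ``BatchSettings`` passed to the publisher client. A rough sketch of tuning them follows; only ``max_bytes`` and ``max_latency`` are field names confirmed by this module, and ``BatchSettings`` is assumed to supply defaults for anything left unspecified.

.. code-block:: python

    from google.cloud import pubsub
    from google.cloud.pubsub_v1 import types

    # Commit a batch once it holds roughly 512 KB of message data or has
    # been open for 50 ms, whichever happens first.
    batch_settings = types.BatchSettings(
        max_bytes=512 * 1024,
        max_latency=0.05,
    )
    publisher = pubsub.PublisherClient(batch_settings=batch_settings)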
+ """ + def __len__(self): + """Return the number of messages currently in the batch.""" + return len(self.messages) + + @property + @abc.abstractmethod + def messages(self): + """Return the messages currently in the batch. + + Returns: + Sequence: The messages currently in the batch. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def size(self): + """Return the total size of all of the messages currently in the batch. + + Returns: + int: The total size of all of the messages currently + in the batch, in bytes. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def settings(self): + """Return the batch settings. + + Returns: + ~.pubsub_v1.types.BatchSettings: The batch settings. These are + considered immutable once the batch has been opened. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def status(self): + """Return the status of this batch. + + Returns: + str: The status of this batch. All statuses are human-readable, + all-lowercase strings. The ones represented in the + :class:`BaseBatch.Status` enum are special, but other statuses + are permitted. + """ + raise NotImplementedError + + def will_accept(self, message): + """Return True if the batch is able to accept the message. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + bool: Whether this batch can accept the message. + """ + # If this batch is not accepting messages generally, return False. + if self.status != BatchStatus.ACCEPTING_MESSAGES: + return False + + # If this batch can not hold the message in question, return False. + if self.size + message.ByteSize() > self.settings.max_bytes: + return False + + # Okay, everything is good. + return True + + @abc.abstractmethod + def publish(self, message): + """Publish a single message. + + Add the given message to this object; this will cause it to be + published once the batch either has enough messages or a sufficient + period of time has elapsed. + + This method is called by :meth:`~.PublisherClient.publish`. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the + :class:`concurrent.futures.Future` interface. + """ + raise NotImplementedError + + +class BatchStatus(object): + """An enum-like class representing valid statuses for a batch. + + It is acceptable for a class to use a status that is not on this + class; this represents the list of statuses where the existing + library hooks in functionality. + """ + ACCEPTING_MESSAGES = 'accepting messages' + ERROR = 'error' + SUCCESS = 'success' diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py new file mode 100644 index 000000000000..f5c08a76f315 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -0,0 +1,245 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
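The abstract class above is the contract behind the ``batch_class`` hook on the publisher client. A minimal sketch of selecting the implementation explicitly: the thread-based class shown is the documented default, and a custom subclass of the abstract ``Batch`` could be passed in its place to change the concurrency model.

.. code-block:: python

    from google.cloud import pubsub
    from google.cloud.pubsub_v1.publisher.batch import thread

    # thread.Batch is what the client uses when nothing is specified.
    publisher = pubsub.PublisherClient(batch_class=thread.Batch)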
+ +from __future__ import absolute_import + +import logging +import threading +import time + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher import futures +from google.cloud.pubsub_v1.publisher.batch import base + + +class Batch(base.Batch): + """A batch of messages. + + The batch is the internal group of messages which are either awaiting + publication or currently in-flight. + + A batch is automatically created by the PublisherClient when the first + message to be published is received; subsequent messages are added to + that batch until the process of actual publishing _starts_. + + Once this occurs, any new messages sent to :meth:`publish` open a new + batch. + + If you are using this library, you most likely do not need to instantiate + batch objects directly; they will be created for you. If you want to + change the actual batching settings, see the ``batching`` argument on + :class:`~.pubsub_v1.PublisherClient`. + + Any properties or methods on this class which are not defined in + :class:`~.pubsub_v1.publisher.batch.BaseBatch` should be considered + implementation details. + + Args: + client (~.pubsub_v1.PublisherClient): The publisher client used to + create this batch. + topic (str): The topic. The format for this is + ``projects/{project}/topics/{topic}``. + settings (~.pubsub_v1.types.BatchSettings): The settings for batch + publishing. These should be considered immutable once the batch + has been opened. + autocommit (bool): Whether to autocommit the batch when the time + has elapsed. Defaults to True unless ``settings.max_latency`` is + inf. + """ + def __init__(self, client, topic, settings, autocommit=True): + self._client = client + + # These objects are all communicated between threads; ensure that + # any writes to them are atomic. + self._futures = [] + self._messages = [] + self._size = 0 + self._settings = settings + self._status = base.BatchStatus.ACCEPTING_MESSAGES + self._topic = topic + + # If max latency is specified, start a thread to monitor the batch and + # commit when the max latency is reached. + self._thread = None + self._commit_lock = threading.Lock() + if autocommit and self._settings.max_latency < float('inf'): + self._thread = threading.Thread(target=self.monitor) + self._thread.start() + + @property + def client(self): + """~.pubsub_v1.client.PublisherClient: A publisher client.""" + return self._client + + @property + def messages(self): + """Sequence: The messages currently in the batch.""" + return self._messages + + @property + def settings(self): + """Return the batch settings. + + Returns: + ~.pubsub_v1.types.BatchSettings: The batch settings. These are + considered immutable once the batch has been opened. + """ + return self._settings + + @property + def size(self): + """Return the total size of all of the messages currently in the batch. + + Returns: + int: The total size of all of the messages currently + in the batch, in bytes. + """ + return self._size + + @property + def status(self): + """Return the status of this batch. + + Returns: + str: The status of this batch. All statuses are human-readable, + all-lowercase strings. + """ + return self._status + + def commit(self): + """Actually publish all of the messages on the active batch. + + This synchronously sets the batch status to in-flight, and then opens + a new thread, which handles actually sending the messages to Pub/Sub. + + .. note:: + + This method is non-blocking. 
It opens a new thread, which calls + :meth:`_commit`, which does block. + """ + # Set the status to in-flight synchronously, to ensure that + # this batch will necessarily not accept new messages. + # + # Yes, this is repeated in `_commit`, because that method is called + # directly by `monitor`. + self._status = 'in-flight' + + # Start a new thread to actually handle the commit. + commit_thread = threading.Thread(target=self._commit) + commit_thread.start() + + def _commit(self): + """Actually publish all of the messages on the active batch. + + This moves the batch out from being the active batch to an in-flight + batch on the publisher, and then the batch is discarded upon + completion. + + .. note:: + + This method blocks. The :meth:`commit` method is the non-blocking + version, which calls this one. + """ + with self._commit_lock: + # If, in the intervening period, the batch started to be committed, + # or completed a commit, then no-op at this point. + if self._status != base.BatchStatus.ACCEPTING_MESSAGES: + return + + # Update the status. + self._status = 'in-flight' + + # Sanity check: If there are no messages, no-op. + if not self._messages: + return + + # Begin the request to publish these messages. + # Log how long the underlying request takes. + start = time.time() + response = self.client.api.publish( + self._topic, + self.messages, + ) + end = time.time() + logging.getLogger().debug('gRPC Publish took {s} seconds.'.format( + s=end - start, + )) + + # We got a response from Pub/Sub; denote that we are processing. + self._status = 'processing results' + + # Sanity check: If the number of message IDs is not equal to the + # number of futures I have, then something went wrong. + if len(response.message_ids) != len(self._futures): + for future in self._futures: + future.set_exception(exceptions.PublishError( + 'Some messages were not successfully published.', + )) + return + + # Iterate over the futures on the queue and return the response + # IDs. We are trusting that there is a 1:1 mapping, and raise an + # exception if not. + self._status = base.BatchStatus.SUCCESS + for message_id, future in zip(response.message_ids, self._futures): + future.set_result(message_id) + + def monitor(self): + """Commit this batch after sufficient time has elapsed. + + This simply sleeps for ``self._settings.max_latency`` seconds, + and then calls commit unless the batch has already been committed. + """ + # Note: This thread blocks; it is up to the calling code to call it + # in a separate thread. + # + # Sleep for however long we should be waiting. + time.sleep(self._settings.max_latency) + + # Commit. + return self._commit() + + def publish(self, message): + """Publish a single message. + + Add the given message to this object; this will cause it to be + published once the batch either has enough messages or a sufficient + period of time has elapsed. + + This method is called by :meth:`~.PublisherClient.publish`. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + ~.pubsub_v1.publisher.futures.Future: An object conforming to + the :class:`concurrent.futures.Future` interface. + """ + # Coerce the type, just in case. + if not isinstance(message, types.PubsubMessage): + message = types.PubsubMessage(**message) + + # Add the size to the running total of the size, so we know + # if future messages need to be rejected. + self._size += message.ByteSize() + + # Store the actual message in the batch's message queue. 
+ self._messages.append(message) + + # Return a Future. That future needs to be aware of the status + # of this batch. + f = futures.Future() + self._futures.append(f) + return f diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py new file mode 100644 index 000000000000..e80662a715ef --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -0,0 +1,161 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import copy +import pkg_resources +import threading + +import six + +from google.cloud.gapic.pubsub.v1 import publisher_client + +from google.cloud.pubsub_v1 import _gapic +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher.batch import thread + + +__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version + + +@_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) +class Client(object): + """A publisher client for Google Cloud Pub/Sub. + + This creates an object that is capable of publishing messages. + Generally, you can instantiate this client with no arguments, and you + get sensible defaults. + + Args: + batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The + settings for batch publishing. + batch_class (class): A class that describes how to handle + batches. You may subclass the + :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in + order to define your own batcher. This is primarily provided to + allow use of different concurrency models; the default + is based on :class:`threading.Thread`. + kwargs (dict): Any additional arguments provided are sent as keyword + arguments to the underlying + :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. + Generally, you should not need to set additional keyword arguments. + """ + def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): + # Add the metrics headers, and instantiate the underlying GAPIC + # client. + kwargs['lib_name'] = 'gccl' + kwargs['lib_version'] = __VERSION__ + self.api = publisher_client.PublisherClient(**kwargs) + self.batch_settings = types.BatchSettings(*batch_settings) + + # The batches on the publisher client are responsible for holding + # messages. One batch exists for each topic. + self._batch_class = batch_class + self._batch_lock = threading.Lock() + self._batches = {} + + def batch(self, topic, message, create=True, autocommit=True): + """Return the current batch for the provided topic. + + This will create a new batch only if no batch currently exists. + + Args: + topic (str): A string representing the topic. + message (~google.cloud.pubsub_v1.types.PubsubMessage): The message + that will be committed. + create (bool): Whether to create a new batch if no batch is + found. Defaults to True. + autocommit (bool): Whether to autocommit this batch. + This is primarily useful for debugging. 
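Because the client keeps one open batch per topic and hands every published message to it, a burst of ``publish`` calls is normally coalesced into a single API request. A hedged sketch of that pattern (the topic path is illustrative):

.. code-block:: python

    from google.cloud import pubsub

    publisher = pubsub.PublisherClient()
    topic = 'projects/my-project/topics/my-topic'  # illustrative path

    # Each call returns a future immediately; the messages themselves are
    # grouped into batches behind the scenes and sent together.
    futures = [
        publisher.publish(topic, b'payload %d' % i, origin='sample')
        for i in range(10)
    ]
    message_ids = [future.result() for future in futures]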
+ + Returns: + ~.pubsub_v1.batch.Batch: The batch object. + """ + # If there is no matching batch yet, then potentially create one + # and place it on the batches dictionary. + with self._batch_lock: + batch = self._batches.get(topic, None) + if batch is None or not batch.will_accept(message): + if not create: + return None + batch = self._batch_class( + autocommit=autocommit, + client=self, + settings=self.batch_settings, + topic=topic, + ) + self._batches[topic] = batch + + # Simply return the appropriate batch. + return batch + + def publish(self, topic, data, **attrs): + """Publish a single message. + + .. note:: + Messages in Pub/Sub are blobs of bytes. They are *binary* data, + not text. You must send data as a bytestring + (``bytes`` in Python 3; ``str`` in Python 2), and this library + will raise an exception if you send a text string. + + The reason that this is so important (and why we do not try to + coerce for you) is because Pub/Sub is also platform independent + and there is no way to know how to decode messages properly on + the other side; therefore, encoding and decoding is a required + exercise for the developer. + + Add the given message to this object; this will cause it to be + published once the batch either has enough messages or a sufficient + period of time has elapsed. + + Example: + >>> from google.cloud.pubsub_v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> data = b'The rain in Wales falls mainly on the snails.' + >>> response = client.publish(topic, data, username='guido') + + Args: + topic (str): The topic to publish messages to. + data (bytes): A bytestring representing the message body. This + must be a bytestring. + attrs (Mapping[str, str]): A dictionary of attributes to be + sent as metadata. (These may be text strings or byte strings.) + + Returns: + ~concurrent.futures.Future: An object conforming to the + ``concurrent.futures.Future`` interface. + """ + # Sanity check: Is the data being sent as a bytestring? + # If it is literally anything else, complain loudly about it. + if not isinstance(data, six.binary_type): + raise TypeError('Data being published to Pub/Sub must be sent ' + 'as a bytestring.') + + # Coerce all attributes to text strings. + for k, v in copy.copy(attrs).items(): + if isinstance(v, six.text_type): + continue + if isinstance(v, six.binary_type): + attrs[k] = v.decode('utf-8') + continue + raise TypeError('All attributes being published to Pub/Sub must ' + 'be sent as text strings.') + + # Create the Pub/Sub message object. + message = types.PubsubMessage(data=data, attributes=attrs) + + # Delegate the publishing to the batch. + return self.batch(topic, message=message).publish(message) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py new file mode 100644 index 000000000000..bae090ceb9d7 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -0,0 +1,29 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from concurrent.futures import TimeoutError + +from google.api.core.exceptions import GoogleAPICallError + + +class PublishError(GoogleAPICallError): + pass + + +__all__ = ( + 'PublishError', + 'TimeoutError', +) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/futures.py b/pubsub/google/cloud/pubsub_v1/publisher/futures.py new file mode 100644 index 000000000000..cbc67d9e55c3 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -0,0 +1,169 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import threading + +import google.api.core.future +from google.cloud.pubsub_v1.publisher import exceptions + + +class Future(google.api.core.future.Future): + """Encapsulation of the asynchronous execution of an action. + + This object is returned from asychronous Pub/Sub calls, and is the + interface to determine the status of those calls. + + This object should not be created directly, but is returned by other + methods in this library. + """ + def __init__(self): + self._callbacks = [] + self._result = None + self._exception = None + self._completed = threading.Event() + + def cancel(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns False. + """ + return False + + def cancelled(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns False. + """ + return False + + def running(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns True. + """ + return True + + def done(self): + """Return True if the publish has completed, False otherwise. + + This still returns True in failure cases; checking :meth:`result` or + :meth:`exception` is the canonical way to assess success or failure. + """ + return self._exception is not None or self._result is not None + + def result(self, timeout=None): + """Return the message ID, or raise an exception. + + This blocks until the message has successfully been published, and + returns the message ID. + + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Returns: + str: The message ID. + + Raises: + ~.pubsub_v1.TimeoutError: If the request times out. + Exception: For undefined exceptions in the underlying + call execution. + """ + # Attempt to get the exception if there is one. + # If there is not one, then we know everything worked, and we can + # return an appropriate value. + err = self.exception(timeout=timeout) + if err is None: + return self._result + raise err + + def exception(self, timeout=None, _wait=1): + """Return the exception raised by the call, if any. + + This blocks until the message has successfully been published, and + returns the exception. If the call succeeded, return None. 
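Both blocking accessors can be paired with ``add_done_callback`` for a non-blocking style. A short sketch, assuming the future came from a ``publish`` call and using an illustrative topic path:

.. code-block:: python

    from google.cloud import pubsub

    publisher = pubsub.PublisherClient()
    topic = 'projects/my-project/topics/my-topic'  # illustrative path
    future = publisher.publish(topic, b'hello')

    def on_done(fut):
        # exception() returns None on success, the raised error otherwise.
        if fut.exception() is None:
            print('Published message {}'.format(fut.result()))

    future.add_done_callback(on_done)

    # Or block, waiting up to 30 seconds for the server-assigned message ID.
    message_id = future.result(timeout=30)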
+ + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Raises: + TimeoutError: If the request times out. + + Returns: + Exception: The exception raised by the call, if any. + """ + # Wait until the future is done. + if not self._completed.wait(timeout=timeout): + raise exceptions.TimeoutError('Timed out waiting for result.') + + # If the batch completed successfully, this should return None. + if self._result is not None: + return None + + # Okay, this batch had an error; this should return it. + return self._exception + + def add_done_callback(self, fn): + """Attach the provided callable to the future. + + The provided function is called, with this future as its only argument, + when the future finishes running. + """ + if self.done(): + fn(self) + self._callbacks.append(fn) + + def set_result(self, result): + """Set the result of the future to the provided result. + + Args: + result (str): The message ID. + """ + # Sanity check: A future can only complete once. + if self._result is not None or self._exception is not None: + raise RuntimeError('set_result can only be called once.') + + # Set the result and trigger the future. + self._result = result + self._trigger() + + def set_exception(self, exception): + """Set the result of the future to the given exception. + + Args: + exception (:exc:`Exception`): The exception raised. + """ + # Sanity check: A future can only complete once. + if self._result is not None or self._exception is not None: + raise RuntimeError('set_exception can only be called once.') + + # Set the exception and trigger the future. + self._exception = exception + self._trigger() + + def _trigger(self): + """Trigger all callbacks registered to this Future. + + This method is called internally by the batch once the batch + completes. + + Args: + message_id (str): The message ID, as a string. + """ + self._completed.set() + for callback in self._callbacks: + callback(self) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py new file mode 100644 index 000000000000..d98a7bb75be4 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1.subscriber.client import Client + + +__all__ = ( + 'Client', +) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py new file mode 100644 index 000000000000..9fb2567176bc --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -0,0 +1,267 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Bidirectional Streaming Consumer. + +The goal here is to consume a bidirectional streaming RPC by fanning out the +responses received from the server to be processed and fanning in requests from +the response processors to be sent to the server through the request stream. +This module is a framework to deal with this pattern in a consistent way: + + * A :class:`Consumer` manages scheduling requests to a stream and consuming + responses from a stream. The Consumer takes the responses and schedules + them to be processed in callbacks using any + :class:`~concurrent.futures.Executor`. + * A :class:`Policy` which determines how the consumer calls the RPC and + processes responses, errors, and messages. + +The :class:`Policy` is the only class that's intended to be sub-classed here. +This would be implemented for every bidirectional streaming method. +How does this work? The first part of the implementation, fanning out +responses, its actually quite straightforward and can be done with just a +:class:`concurrent.futures.Executor`: + +.. graphviz:: + digraph responses_only { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "Policy" [label="responses", color="red"] + "Policy" -> "futures.Executor" [label="response", color="red"] + "futures.Executor" -> "callback" [label="response", color="red"] + } + +The challenge comes from the fact that in bidirectional streaming two more +things have to be done: + + 1. The consumer must maintain a long-running request generator. + 2. The consumer must provide some way for the response processor to queue + new requests. + +These are especially important because in the case of Pub/Sub you are +essentially streaming requests indefinitely and receiving responses +indefinitely. + +For the first challenge, we take advantage of the fact that gRPC runs the +request generator in its own thread. That thread can block, so we can use +a queue for that: + +.. graphviz:: + digraph response_flow { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "request generator thread" [label="starts", color="gray"] + "request generator thread" -> "gRPC Python" + [label="requests", color="blue"] + } + +The final piece of the puzzle, allowing things from anywhere to queue new +requests, it a bit more complex. If we were only dealing with threads, then the +response workers could just directly interact with the policy/consumer to +queue new requests: + +.. 
graphviz:: + digraph thread_only_requests { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "request generator thread" [label="starts", color="gray"] + "request generator thread" -> "gRPC Python" + [label="requests", color="blue"] + "Consumer" -> "Policy" [label="responses", color="red"] + "Policy" -> "futures.Executor" [label="response", color="red"] + "futures.Executor" -> "callback" [label="response", color="red"] + "callback" -> "Consumer" [label="send_request", color="blue"] + } + +But, because this does not dictate any particular concurrent strategy for +dealing with the responses, it's possible that a response could be processed +in a different thread, process, or even on a different machine. Because of +this, we need an intermediary queue between the callbacks and the gRPC request +queue to bridge the "concurrecy gap". To pump items from the concurrecy-safe +queue into the gRPC request queue, we need another worker thread. Putting this +all together looks like this: + +.. graphviz:: + digraph responses_only { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "request generator thread" [label="starts", color="gray"] + "Policy" -> "QueueCallbackThread" [label="starts", color="gray"] + "request generator thread" -> "gRPC Python" + [label="requests", color="blue"] + "Consumer" -> "Policy" [label="responses", color="red"] + "Policy" -> "futures.Executor" [label="response", color="red"] + "futures.Executor" -> "callback" [label="response", color="red"] + "callback" -> "callback_request_queue" [label="requests", color="blue"] + "callback_request_queue" -> "QueueCallbackThread" + [label="consumed by", color="blue"] + "QueueCallbackThread" -> "Consumer" + [label="send_response", color="blue"] + } + +This part is actually up to the Policy to enable. The consumer just provides a +thread-safe queue for requests. The :cls:`QueueCallbackThread` can be used by +the Policy implementation to spin up the worker thread to pump the +concurrency-safe queue. See the Pub/Sub subscriber implementation for an +example of this. +""" + +import logging +import queue +import threading + +from google.cloud.pubsub_v1.subscriber import _helper_threads + +_LOGGER = logging.getLogger(__name__) + + +class Consumer(object): + """Bi-directional streaming RPC consumer. + + This class coordinates the consumption of a bi-directional streaming RPC. + There is a bit of background information to know before understanding how + this class operates: + + 1. gRPC has its own background thread for dealing with I/O. + 2. gRPC consumes a streaming call's request generator in another + thread. + 3. If the request generator thread exits, gRPC will close the + connection. + + Because of (2) and (3), the consumer must always at least use threading + for some bookkeeping. No matter what, a thread will be created by gRPC to + generate requests. This thread is called the *request generator thread*. + Having the request generator thread allows the consumer to hold the stream + open indefinitely. Now gRPC will send responses as fast as the consumer can + ask for them. The consumer hands these off to the :cls:`Policy` via + :meth:`Policy.on_response`, which should not block. + + Finally, we do not want to block the main thread, so the consumer actually + invokes the RPC itself in a separate thread. This thread is called the + *response consumer helper thread*. 
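The queue-pumping pattern described here is plain Python; a standalone sketch of the idea follows (none of these names belong to the library):

.. code-block:: python

    import queue
    import threading

    STOP = object()  # illustrative stop sentinel

    def pump(request_queue, send):
        """Forward queued items to ``send`` until the stop sentinel arrives."""
        while True:
            item = request_queue.get()
            if item is STOP:
                break
            send(item)

    requests = queue.Queue()
    worker = threading.Thread(target=pump, args=(requests, print), daemon=True)
    worker.start()

    requests.put('ack: 12345')  # any callback can enqueue work safely
    requests.put(STOP)
    worker.join()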
+ + So all in all there are three threads: + + 1. gRPC's internal I/O thread. + 2. The request generator thread, created by gRPC. + 3. The response consumer helper thread, created by the Consumer. + + In addition, the Consumer likely uses some sort of concurreny to prevent + blocking on processing responses. The Policy may also use another thread to + deal with pumping messages from an external queue into the request queue + here. + + It may seem strange to use threads for something "high performance" + considering the GIL. However, the threads here are not CPU bound. They are + simple threads that are blocked by I/O and generally just move around some + simple objects between queues. The overhead for these helper threads is + low. The Consumer and end-user can configure any sort of executor they want + for the actual processing of the responses, which may be CPU intensive. + """ + def __init__(self, policy): + """ + Args: + policy (Consumer): The consumer policy, which defines how + requests and responses are handled. + """ + self._policy = policy + self._request_queue = queue.Queue() + self._exiting = threading.Event() + + self.active = False + self.helper_threads = _helper_threads.HelperThreadRegistry() + """:cls:`_helper_threads.HelperThreads`: manages the helper threads. + The policy may use this to schedule its own helper threads. + """ + + def send_request(self, request): + """Queue a request to be sent to gRPC. + + Args: + request (Any): The request protobuf. + """ + self._request_queue.put(request) + + def _request_generator_thread(self): + """Generate requests for the stream. + + This blocks for new requests on the request queue and yields them to + gRPC. + """ + # First, yield the initial request. This occurs on every new + # connection, fundamentally including a resumed connection. + initial_request = self._policy.get_initial_request(ack_queue=True) + _LOGGER.debug('Sending initial request: {initial_request}'.format( + initial_request=initial_request, + )) + yield initial_request + + # Now yield each of the items on the request queue, and block if there + # are none. This can and must block to keep the stream open. + while True: + request = self._request_queue.get() + if request == _helper_threads.STOP: + _LOGGER.debug('Request generator signaled to stop.') + break + + _LOGGER.debug('Sending request: {}'.format(request)) + yield request + + def _blocking_consume(self): + """Consume the stream indefinitely.""" + while True: + # It is possible that a timeout can cause the stream to not + # exit cleanly when the user has called stop_consuming(). This + # checks to make sure we're not exiting before opening a new + # stream. + if self._exiting.is_set(): + _LOGGER.debug('Event signalled consumer exit.') + break + + request_generator = self._request_generator_thread() + response_generator = self._policy.call_rpc(request_generator) + try: + for response in response_generator: + _LOGGER.debug('Received response: {0}'.format(response)) + self._policy.on_response(response) + + # If the loop above exits without an exception, then the + # request stream terminated cleanly, which should only happen + # when it was signaled to do so by stop_consuming. In this + # case, break out of the while loop and exit this thread. 
+ _LOGGER.debug('Clean RPC loop exit signalled consumer exit.') + break + except KeyboardInterrupt: + self.stop_consuming() + except Exception as exc: + try: + self._policy.on_exception(exc) + except: + self.active = False + raise + + def start_consuming(self): + """Start consuming the stream.""" + self.active = True + self._exiting.clear() + self.helper_threads.start( + 'consume bidirectional stream', + self._request_queue, + self._blocking_consume, + ) + + def stop_consuming(self): + """Signal the stream to stop and block until it completes.""" + self.active = False + self._exiting.set() + self.helper_threads.stop_all() diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py new file mode 100644 index 000000000000..21e812a0d2ad --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -0,0 +1,129 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import logging +import threading +import uuid + +import six + +__all__ = ( + 'HelperThreadRegistry', + 'QueueCallbackThread', + 'STOP', +) + +_LOGGER = logging.getLogger(__name__) + +_HelperThread = collections.namedtuple( + 'HelperThreads', + ['name', 'thread', 'queue'], +) + + +# Helper thread stop indicator. This could be a sentinel object or None, +# but the sentinel object's ID can change if the process is forked, and +# None has the possibility of a user accidentally killing the helper +# thread. +STOP = uuid.uuid4() + + +class HelperThreadRegistry(object): + def __init__(self): + self._helper_threads = {} + + def __contains__(self, needle): + return needle in self._helper_threads + + def start(self, name, queue, target, *args, **kwargs): + """Create and start a helper thread. + + Args: + name (str): The name of the helper thread. + queue (Queue): A concurrency-safe queue. + target (Callable): The target of the thread. + args: Additional args passed to the thread constructor. + kwargs: Additional kwargs passed to the thread constructor. + + Returns: + threading.Thread: The created thread. + """ + # Create and start the helper thread. + thread = threading.Thread( + name='Consumer helper: {}'.format(name), + target=target, + *args, **kwargs + ) + thread.daemon = True + thread.start() + + # Keep track of the helper thread, so we are able to stop it. + self._helper_threads[name] = _HelperThread(name, thread, queue) + _LOGGER.debug('Started helper thread {}'.format(name)) + return thread + + def stop(self, name): + """Stops a helper thread. + + Sends the stop message and blocks until the thread joins. + + Args: + name (str): The name of the thread. + """ + # Attempt to retrieve the thread; if it is gone already, no-op. + helper_thread = self._helper_threads.get(name) + if helper_thread is None: + return + + # Join the thread if it is still alive. 
+ if helper_thread.thread.is_alive(): + _LOGGER.debug('Stopping helper thread {}'.format(name)) + helper_thread.queue.put(STOP) + helper_thread.thread.join() + + # Remove the thread from our tracking. + self._helper_threads.pop(name, None) + + def stop_all(self): + """Stop all helper threads.""" + # This could be more efficient by sending the stop signal to all + # threads before joining any of them. + for name in list(six.iterkeys(self._helper_threads)): + self.stop(name) + + +class QueueCallbackThread(object): + """A helper thread that executes a callback for every item in + the queue. + """ + def __init__(self, queue, callback): + self.queue = queue + self._callback = callback + + def __call__(self): + while True: + item = self.queue.get() + if item == STOP: + break + + # Run the callback. If any exceptions occur, log them and + # continue. + try: + self._callback(item) + except Exception as exc: + _LOGGER.error('{class_}: {message}'.format( + class_=exc.__class__.__name__, + message=str(exc), + )) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py b/pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py new file mode 100644 index 000000000000..09f047495896 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py @@ -0,0 +1,155 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division + + +class Histogram(object): + """Representation of a single histogram. + + The purpose of this class is to store actual ack timing information + in order to predict how long to renew leases. + + The default implementation uses the 99th percentile of previous ack + times to implicitly lease messages; however, custom + :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer` subclasses + are free to use a different formula. + + The precision of data stored is to the nearest integer. Additionally, + values outside the range of ``10 <= x <= 600`` are stored as ``10`` or + ``600``, since these are the boundaries of leases in the actual API. + """ + def __init__(self, data=None): + """Instantiate the histogram. + + Args: + data (Mapping[str, int]): The data strucure to be used to store + the underlying data. The default is an empty dictionary. + This can be set to a dictionary-like object if required + (for example, if a special object is needed for + concurrency reasons). + """ + # The data is stored as a dictionary, with the keys being the + # value being added and the values being the number of times that + # value was added to the dictionary. + # + # This is depending on the Python interpreter's implicit ordering + # of dictionaries, which is a bitwise sort by the key's ``hash()`` + # value. Because ``hash(int i) -> i`` and all of our keys are + # positive integers (negatives would be a problem because the sort + # is bitwise), we can rely on this. 
+ if data is None: + data = {} + self._data = data + self._len = 0 + + def __len__(self): + """Return the total number of data points in this histogram. + + This is cached on a separate counter (rather than computing it using + ``sum([v for v in self._data.values()])``) to optimize lookup. + + Returns: + int: The total number of data points in this histogram. + """ + return self._len + + def __contains__(self, needle): + """Return True if needle is present in the histogram, False otherwise. + + Returns: + bool: True or False + """ + return needle in self._data + + def __repr__(self): + return ''.format( + len=len(self), + max=self.max, + min=self.min, + ) + + @property + def max(self): + """Return the maximum value in this histogram. + + If there are no values in the histogram at all, return 600. + + Returns: + int: The maximum value in the histogram. + """ + if len(self._data) == 0: + return 600 + return next(iter(reversed(sorted(self._data.keys())))) + + @property + def min(self): + """Return the minimum value in this histogram. + + If there are no values in the histogram at all, return 10. + + Returns: + int: The minimum value in the histogram. + """ + if len(self._data) == 0: + return 10 + return next(iter(sorted(self._data.keys()))) + + def add(self, value): + """Add the value to this histogram. + + Args: + value (int): The value. Values outside of ``10 <= x <= 600`` + will be raised to ``10`` or reduced to ``600``. + """ + # If the value is out of bounds, bring it in bounds. + value = int(value) + if value < 10: + value = 10 + if value > 600: + value = 600 + + # Add the value to the histogram's data dictionary. + self._data.setdefault(value, 0) + self._data[value] += 1 + self._len += 1 + + def percentile(self, percent): + """Return the value that is the Nth precentile in the histogram. + + Args: + percent (Union[int, float]): The precentile being sought. The + default consumer implementations use consistently use ``99``. + + Returns: + int: The value corresponding to the requested percentile. + """ + # Sanity check: Any value over 100 should become 100. + if percent >= 100: + percent = 100 + + # Determine the actual target number. + target = len(self) - len(self) * (percent / 100) + + # Iterate over the values in reverse, dropping the target by the + # number of times each value has been seen. When the target passes + # 0, return the value we are currently viewing. + for k in reversed(sorted(self._data.keys())): + target -= self._data[k] + if target < 0: + return k + + # The only way to get here is if there was no data. + # In this case, just return 10 seconds. + return 10 diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py new file mode 100644 index 000000000000..afb9f7d7ca75 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -0,0 +1,98 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
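A quick illustration of how the ack-time histogram above behaves, including the clamping to the 10-600 second lease bounds (the module is private and is imported here only for demonstration):

.. code-block:: python

    from google.cloud.pubsub_v1.subscriber._histogram import Histogram

    histogram = Histogram()
    for seconds in (3, 15, 20, 20, 45, 900):
        histogram.add(seconds)       # 3 is stored as 10, 900 as 600

    print(len(histogram))            # 6
    print(histogram.min, histogram.max)
    print(histogram.percentile(99))  # drives the suggested ack deadline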
+ +from __future__ import absolute_import + +import pkg_resources + +from google.cloud.gapic.pubsub.v1 import subscriber_client + +from google.cloud.pubsub_v1 import _gapic +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber.policy import thread + + +__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version + + +@_gapic.add_methods(subscriber_client.SubscriberClient, + blacklist=('pull', 'streaming_pull')) +class Client(object): + """A subscriber client for Google Cloud Pub/Sub. + + This creates an object that is capable of subscribing to messages. + Generally, you can instantiate this client with no arguments, and you + get sensible defaults. + + Args: + policy_class (class): A class that describes how to handle + subscriptions. You may subclass the + :class:`.pubsub_v1.subscriber.policy.base.BasePolicy` + class in order to define your own consumer. This is primarily + provided to allow use of different concurrency models; the default + is based on :class:`threading.Thread`. + kwargs (dict): Any additional arguments provided are sent as keyword + keyword arguments to the underlying + :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. + Generally, you should not need to set additional keyword + arguments. + """ + def __init__(self, policy_class=thread.Policy, **kwargs): + # Add the metrics headers, and instantiate the underlying GAPIC + # client. + kwargs['lib_name'] = 'gccl' + kwargs['lib_version'] = __VERSION__ + self.api = subscriber_client.SubscriberClient(**kwargs) + + # The subcription class is responsible to retrieving and dispatching + # messages. + self._policy_class = policy_class + + def subscribe(self, subscription, callback=None, flow_control=()): + """Return a representation of an individual subscription. + + This method creates and returns a ``Consumer`` object (that is, a + :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer`) + subclass) bound to the topic. It does `not` create the subcription + on the backend (or do any API call at all); it simply returns an + object capable of doing these things. + + If the ``callback`` argument is provided, then the :meth:`open` method + is automatically called on the returned object. If ``callback`` is + not provided, the subscription is returned unopened. + + .. note:: + It only makes sense to provide ``callback`` here if you have + already created the subscription manually in the API. + + Args: + subscription (str): The name of the subscription. The + subscription should have already been created (for example, + by using :meth:`create_subscription`). + callback (function): The callback function. This function receives + the :class:`~.pubsub_v1.types.PubsubMessage` as its only + argument. + flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. Use this to prevent situations where you are + inundated with too many messages at once. + + Returns: + ~.pubsub_v1.subscriber.consumer.base.BaseConsumer: An instance + of the defined ``consumer_class`` on the client. + """ + flow_control = types.FlowControl(*flow_control) + subscr = self._policy_class(self, subscription, flow_control) + if callable(callback): + subscr.open(callback) + return subscr diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py new file mode 100644 index 000000000000..1015149cfbbf --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -0,0 +1,198 @@ +# Copyright 2017, Google Inc. All rights reserved. 
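Putting the subscriber client together, an end-to-end sketch of opening a subscription (the project, subscription, and callback names are illustrative, and the subscription itself is assumed to already exist):

.. code-block:: python

    from google.cloud import pubsub

    subscriber = pubsub.SubscriberClient()
    subscription_path = 'projects/my-project/subscriptions/my-subscription'

    def callback(message):
        print(message.data)
        message.ack()

    # Passing a callback opens the subscription immediately; omitting it
    # returns an unopened policy object that can be opened later.
    subscription = subscriber.subscribe(subscription_path, callback=callback)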
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import math +import time + + +class Message(object): + """A representation of a single Pub/Sub message. + + The common way to interact with + :class:`~.pubsub_v1.subscriber.message.Message` objects is to receive + them in callbacks on subscriptions; most users should never have a need + to instantiate them by hand. (The exception to this is if you are + implementing a custom subclass to + :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.) + + Attributes: + message_id (str): The message ID. In general, you should not need + to use this directly. + data (bytes): The data in the message. Note that this will be a + :class:`bytes`, not a text string. + attributes (dict): The attributes sent along with the message. + publish_time (datetime): The time that this message was originally + published. + """ + def __init__(self, message, ack_id, request_queue): + """Construct the Message. + + .. note:: + + This class should not be constructed directly; it is the + responsibility of :class:`BasePolicy` subclasses to do so. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The message received + from Pub/Sub. + ack_id (str): The ack_id received from Pub/Sub. + request_queue (queue.Queue): A queue provided by the policy that + can accept requests; the policy is responsible for handling + those requests. + """ + self._message = message + self._ack_id = ack_id + self._request_queue = request_queue + self.message_id = message.message_id + + # The instantiation time is the time that this message + # was received. Tracking this provides us a way to be smart about + # the default lease deadline. + self._received_timestamp = time.time() + + # The policy should lease this message, telling PubSub that it has + # it until it is acked or otherwise dropped. + self.lease() + + def __repr__(self): + # Get an abbreviated version of the data. + abbv_data = self._message.data + if len(abbv_data) > 50: + abbv_data = abbv_data[0:50] + b'...' + + # Return a useful representation. + answer = 'Message {\n' + answer += ' data: {0!r}\n'.format(abbv_data) + answer += ' attributes: {0!r}\n'.format(self.attributes) + answer += '}' + return answer + + @property + def attributes(self): + """Return the attributes of the underlying Pub/Sub Message. + + Returns: + dict: The message's attributes. + """ + return self._message.attributes + + @property + def data(self): + """Return the data for the underlying Pub/Sub Message. + + Returns: + bytes: The message data. This is always a bytestring; if you + want a text string, call :meth:`bytes.decode`. + """ + return self._message.data + + @property + def publish_time(self): + """Return the time that the message was originally published. + + Returns: + datetime: The date and time that the message was published. 
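Inside a callback, the properties above are usually enough to decide between acknowledging and rejecting a message; a short hedged sketch (the attribute key is illustrative):

.. code-block:: python

    def callback(message):
        # ``data`` is always bytes; decode it if you expect text.
        payload = message.data.decode('utf-8')

        if message.attributes.get('origin') == 'sample':
            print('{} published at {}'.format(payload, message.publish_time))
            message.ack()
        else:
            # nack() asks Pub/Sub to redeliver the message later.
            message.nack()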
+        """
+        return self._message.publish_time
+
+    @property
+    def size(self):
+        """Return the size of the underlying message, in bytes."""
+        return self._message.ByteSize()
+
+    def ack(self):
+        """Acknowledge the given message.
+
+        Acknowledging a message in Pub/Sub means that you are done
+        with it, and it will not be delivered to this subscription again.
+        You should avoid acknowledging messages until you have
+        *finished* processing them, so that in the event of a failure,
+        you receive the message again.
+
+        .. warning::
+            Acks in Pub/Sub are best effort. You should always
+            ensure that your processing code is idempotent, as you may
+            receive any given message more than once.
+        """
+        time_to_ack = math.ceil(time.time() - self._received_timestamp)
+        self._request_queue.put(('ack', {
+            'ack_id': self._ack_id,
+            'byte_size': self.size,
+            'time_to_ack': time_to_ack,
+        }))
+
+    def drop(self):
+        """Release the message from lease management.
+
+        This informs the policy to no longer hold on to the lease for this
+        message. Pub/Sub will re-deliver the message if it is not acknowledged
+        before the existing lease expires.
+
+        .. warning::
+            For most use cases, the only reason to drop a message from
+            lease management is on :meth:`ack` or :meth:`nack`; these methods
+            both call this one. You probably do not want to call this method
+            directly.
+        """
+        self._request_queue.put(('drop', {
+            'ack_id': self._ack_id,
+            'byte_size': self.size,
+        }))
+
+    def lease(self):
+        """Inform the policy to lease this message continually.
+
+        .. note::
+            This method is called by the constructor, and you should never
+            need to call it manually.
+        """
+        self._request_queue.put(('lease', {
+            'ack_id': self._ack_id,
+            'byte_size': self.size,
+        }))
+
+    def modify_ack_deadline(self, seconds):
+        """Set the deadline for acknowledgement to the given value.
+
+        The default implementation handles this for you; you should not need
+        to manually deal with setting ack deadlines. The exception case is
+        if you are implementing your own custom subclass of
+        :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.
+
+        .. note::
+            This is not an extension; it *sets* the deadline to the given
+            number of seconds from right now. It is even possible to use this
+            method to make a deadline shorter.
+
+        Args:
+            seconds (int): The number of seconds to set the lease deadline
+                to. This should be between 0 and 600. Due to network latency,
+                values below 10 are advised against.
+        """
+        self._request_queue.put(('modify_ack_deadline', {
+            'ack_id': self._ack_id,
+            'seconds': seconds,
+        }))
+
+    def nack(self):
+        """Decline to acknowledge the given message.
+
+        This will cause the message to be re-delivered to the subscription.
+        """
+        self._request_queue.put(('nack', {
+            'ack_id': self._ack_id,
+            'byte_size': self.size,
+        }))
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py
new file mode 100644
index 000000000000..85d047eb9439
--- /dev/null
+++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py
@@ -0,0 +1,392 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division + +import abc +import logging +import random +import time + +import six + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber import _histogram + +logger = logging.getLogger(__name__) + + +@six.add_metaclass(abc.ABCMeta) +class BasePolicy(object): + """Abstract class defining a subscription policy. + + Although the :class:`~.pubsub_v1.subscriber.policy.thread.Policy` class, + based on :class:`threading.Thread`, is fine for most cases, + advanced users may need to implement something based on a different + concurrency model. + + This class defines the interface for the policy implementation; + subclasses may be passed as the ``policy_class`` argument to + :class:`~.pubsub_v1.client.SubscriberClient`. + """ + def __init__(self, client, subscription, + flow_control=types.FlowControl(), histogram_data=None): + """Instantiate the policy. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. + histogram_data (dict): Optional: A structure to store the histogram + data for predicting appropriate ack times. If set, this should + be a dictionary-like object. + + .. note:: + Additionally, the histogram relies on the assumption + that the dictionary will properly sort keys provided + that all keys are positive integers. If you are sending + your own dictionary class, ensure this assumption holds + or you will get strange behavior. + """ + self._client = client + self._subscription = subscription + self._consumer = _consumer.Consumer(self) + self._ack_deadline = 10 + self._last_histogram_size = 0 + self.flow_control = flow_control + self.histogram = _histogram.Histogram(data=histogram_data) + + # These are for internal flow control tracking. + # They should not need to be used by subclasses. + self._bytes = 0 + self._ack_on_resume = set() + self._paused = False + + @property + def ack_deadline(self): + """Return the appropriate ack deadline. + + This method is "sticky". It will only perform the computations to + check on the right ack deadline if the histogram has gained a + significant amount of new information. + + Returns: + int: The correct ack deadline. + """ + target = min([ + self._last_histogram_size * 2, + self._last_histogram_size + 100, + ]) + if len(self.histogram) > target: + self._ack_deadline = self.histogram.percentile(percent=99) + return self._ack_deadline + + @property + def managed_ack_ids(self): + """Return the ack IDs currently being managed by the policy. + + Returns: + set: The set of ack IDs being managed. + """ + if not hasattr(self, '_managed_ack_ids'): + self._managed_ack_ids = set() + return self._managed_ack_ids + + @property + def subscription(self): + """Return the subscription. 
+ + Returns: + str: The subscription + """ + return self._subscription + + @property + def _load(self): + """Return the current load. + + The load is represented as a float, where 1.0 represents having + hit one of the flow control limits, and values between 0.0 and 1.0 + represent how close we are to them. (0.5 means we have exactly half + of what the flow control setting allows, for example.) + + There are (currently) two flow control settings; this property + computes how close the subscriber is to each of them, and returns + whichever value is higher. (It does not matter that we have lots of + running room on setting A if setting B is over.) + + Returns: + float: The load value. + """ + return max([ + len(self.managed_ack_ids) / self.flow_control.max_messages, + self._bytes / self.flow_control.max_bytes, + ]) + + def ack(self, ack_id, time_to_ack=None, byte_size=None): + """Acknowledge the message corresponding to the given ack_id. + + Args: + ack_id (str): The ack ID. + time_to_ack (int): The time it took to ack the message, measured + from when it was received from the subscription. This is used + to improve the automatic ack timing. + byte_size (int): The size of the PubSub message, in bytes. + """ + # If we got timing information, add it to the histogram. + if time_to_ack is not None: + self.histogram.add(int(time_to_ack)) + + # Send the request to ack the message. + # However, if the consumer is inactive, then queue the ack_id here + # instead; it will be acked as part of the initial request when the + # consumer is started again. + if self._consumer.active: + request = types.StreamingPullRequest(ack_ids=[ack_id]) + self._consumer.send_request(request) + else: + self._ack_on_resume.add(ack_id) + + # Remove the message from lease management. + self.drop(ack_id=ack_id, byte_size=byte_size) + + def call_rpc(self, request_generator): + """Invoke the Pub/Sub streaming pull RPC. + + Args: + request_generator (Generator): A generator that yields requests, + and blocks if there are no outstanding requests (until such + time as there are). + """ + return self._client.api.streaming_pull(request_generator) + + def drop(self, ack_id, byte_size): + """Remove the given ack ID from lease management. + + Args: + ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. + """ + # Remove the ack ID from lease management, and decrement the + # byte counter. + if ack_id in self.managed_ack_ids: + self.managed_ack_ids.remove(ack_id) + self._bytes -= byte_size + self._bytes = min([self._bytes, 0]) + + # If we have been paused by flow control, check and see if we are + # back within our limits. + # + # In order to not thrash too much, require us to have passed below + # the resume threshold (80% by default) of each flow control setting + # before restarting. + if self._paused and self._load < self.flow_control.resume_threshold: + self._paused = False + self.open(self._callback) + + def get_initial_request(self, ack_queue=False): + """Return the initial request. + + This defines the initial request that must always be sent to Pub/Sub + immediately upon opening the subscription. + + Args: + ack_queue (bool): Whether to include any acks that were sent + while the connection was paused. + + Returns: + ~.pubsub_v1.types.StreamingPullRequest: A request suitable + for being the first request on the stream (and not suitable + for any other purpose). + + .. note:: + If ``ack_queue`` is set to True, this includes the ack_ids, but + also clears the internal set. 
+ + This means that calls to :meth:`get_initial_request` with + ``ack_queue`` set to True are not idempotent. + """ + # Any ack IDs that are under lease management and not being acked + # need to have their deadline extended immediately. + ack_ids = set() + lease_ids = self.managed_ack_ids + if ack_queue: + ack_ids = self._ack_on_resume + lease_ids = lease_ids.difference(ack_ids) + + # Put the request together. + request = types.StreamingPullRequest( + ack_ids=list(ack_ids), + modify_deadline_ack_ids=list(lease_ids), + modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), + stream_ack_deadline_seconds=self.histogram.percentile(99), + subscription=self.subscription, + ) + + # Clear the ack_ids set. + # Note: If `ack_queue` is False, this just ends up being a no-op, + # since the set is just an empty set. + ack_ids.clear() + + # Return the initial request. + return request + + def lease(self, ack_id, byte_size): + """Add the given ack ID to lease management. + + Args: + ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. + """ + # Add the ack ID to the set of managed ack IDs, and increment + # the size counter. + if ack_id not in self.managed_ack_ids: + self.managed_ack_ids.add(ack_id) + self._bytes += byte_size + + # Sanity check: Do we have too many things in our inventory? + # If we do, we need to stop the stream. + if self._load >= 1.0: + self._paused = True + self.close() + + def maintain_leases(self): + """Maintain all of the leases being managed by the policy. + + This method modifies the ack deadline for all of the managed + ack IDs, then waits for most of that time (but with jitter), and + then calls itself. + + .. warning:: + This method blocks, and generally should be run in a separate + thread or process. + + Additionally, you should not have to call this method yourself, + unless you are implementing your own policy. If you are + implementing your own policy, you _should_ call this method + in an appropriate form of subprocess. + """ + while True: + # Sanity check: Should this infinitely loop quit? + if not self._consumer.active: + return + + # Determine the appropriate duration for the lease. This is + # based off of how long previous messages have taken to ack, with + # a sensible default and within the ranges allowed by Pub/Sub. + p99 = self.histogram.percentile(99) + logger.debug('The current p99 value is %d seconds.' % p99) + + # Create a streaming pull request. + # We do not actually call `modify_ack_deadline` over and over + # because it is more efficient to make a single request. + ack_ids = list(self.managed_ack_ids) + logger.debug('Renewing lease for %d ack IDs.' % len(ack_ids)) + if len(ack_ids) > 0 and self._consumer.active: + request = types.StreamingPullRequest( + modify_deadline_ack_ids=ack_ids, + modify_deadline_seconds=[p99] * len(ack_ids), + ) + self._consumer.send_request(request) + + # Now wait an appropriate period of time and do this again. + # + # We determine the appropriate period of time based on a random + # period between 0 seconds and 90% of the lease. This use of + # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases + # where there are many clients. + snooze = random.uniform(0.0, p99 * 0.9) + logger.debug('Snoozing lease management for %f seconds.' % snooze) + time.sleep(snooze) + + def modify_ack_deadline(self, ack_id, seconds): + """Modify the ack deadline for the given ack_id. + + Args: + ack_id (str): The ack ID + seconds (int): The number of seconds to set the new deadline to. 
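+
+        For example, setting the deadline to ``0`` is how :meth:`nack`
+        returns a message to Pub/Sub for redelivery.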
+        """
+        request = types.StreamingPullRequest(
+            modify_deadline_ack_ids=[ack_id],
+            modify_deadline_seconds=[seconds],
+        )
+        self._consumer.send_request(request)
+
+    def nack(self, ack_id, byte_size=None):
+        """Explicitly deny receipt of a message.
+
+        Args:
+            ack_id (str): The ack ID.
+            byte_size (int): The size of the PubSub message, in bytes.
+        """
+        self.modify_ack_deadline(ack_id=ack_id, seconds=0)
+        self.drop(ack_id=ack_id, byte_size=byte_size)
+
+    @abc.abstractmethod
+    def close(self):
+        """Close the existing connection."""
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def on_exception(self, exception):
+        """Called when a gRPC exception occurs.
+
+        If this method does nothing, then the stream is re-started. If this
+        raises an exception, it will stop the consumer thread.
+        This is executed on the response consumer helper thread.
+
+        Args:
+            exception (Exception): The exception raised by the RPC.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def on_response(self, response):
+        """Process a response from gRPC.
+
+        This gives the consumer control over how responses are scheduled to
+        be processed. This method is expected to not block and instead
+        schedule the response to be consumed by some concurrency mechanism.
+
+        For example, if the Policy implementation takes a callback in its
+        constructor, you can schedule the callback using a
+        :class:`concurrent.futures.ThreadPoolExecutor`::
+
+            self._pool.submit(self._callback, response)
+
+        This is called from the response consumer helper thread.
+
+        Args:
+            response (Any): The protobuf response from the RPC.
+        """
+        raise NotImplementedError
+
+    @abc.abstractmethod
+    def open(self, callback):
+        """Open a streaming pull connection and begin receiving messages.
+
+        For each message received, the ``callback`` function is fired with
+        a :class:`~.pubsub_v1.subscriber.message.Message` as its only
+        argument.
+
+        Args:
+            callback (Callable[Message]): A callable that receives a
+                Pub/Sub Message.
+        """
+        raise NotImplementedError
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py
new file mode 100644
index 000000000000..df0f965748de
--- /dev/null
+++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py
@@ -0,0 +1,147 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+from concurrent import futures
+from queue import Queue
+import logging
+import threading
+
+import grpc
+
+from google.cloud.pubsub_v1 import types
+from google.cloud.pubsub_v1.subscriber import _helper_threads
+from google.cloud.pubsub_v1.subscriber.policy import base
+from google.cloud.pubsub_v1.subscriber.message import Message
+
+
+logger = logging.getLogger(__name__)
+
+
+class Policy(base.BasePolicy):
+    """A consumer class based on :class:`threading.Thread`.
+
+    This consumer handles the connection to the Pub/Sub service and all of
+    the concurrency needs.
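+
+    A minimal usage sketch (the project and subscription names below are
+    placeholders; in practice the policy is created for you by
+    :meth:`~.pubsub_v1.subscriber.client.Client.subscribe`)::
+
+        from google.cloud import pubsub_v1
+
+        subscriber = pubsub_v1.SubscriberClient()
+        subscription = subscriber.subscribe(
+            'projects/my-project/subscriptions/my-subscription')
+        subscription.open(lambda message: message.ack())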
+ """ + def __init__(self, client, subscription, flow_control=types.FlowControl(), + executor=None, queue=None): + """Instantiate the policy. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow + control settings. + executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A + ThreadPoolExecutor instance, or anything duck-type compatible + with it. + queue (~queue.Queue): (Optional.) A Queue instance, appropriate + for crossing the concurrency boundary implemented by + ``executor``. + """ + # Default the callback to a no-op; it is provided by `.open`. + self._callback = lambda message: None + + # Create a queue for keeping track of shared state. + if queue is None: + queue = Queue() + self._request_queue = Queue() + + # Call the superclass constructor. + super(Policy, self).__init__( + client=client, + flow_control=flow_control, + subscription=subscription, + ) + + # Also maintain a request queue and an executor. + logger.debug('Creating callback requests thread (not starting).') + if executor is None: + executor = futures.ThreadPoolExecutor(max_workers=10) + self._executor = executor + self._callback_requests = _helper_threads.QueueCallbackThread( + self._request_queue, + self.on_callback_request, + ) + + def close(self): + """Close the existing connection.""" + # Close the main subscription connection. + self._consumer.helper_threads.stop('callback requests worker') + self._consumer.stop_consuming() + + def open(self, callback): + """Open a streaming pull connection and begin receiving messages. + + For each message received, the ``callback`` function is fired with + a :class:`~.pubsub_v1.subscriber.message.Message` as its only + argument. + + Args: + callback (Callable): The callback function. + """ + # Start the thread to pass the requests. + logger.debug('Starting callback requests worker.') + self._callback = callback + self._consumer.helper_threads.start( + 'callback requests worker', + self._request_queue, + self._callback_requests, + ) + + # Actually start consuming messages. + self._consumer.start_consuming() + + # Spawn a helper thread that maintains all of the leases for + # this policy. + logger.debug('Spawning lease maintenance worker.') + self._leaser = threading.Thread(target=self.maintain_leases) + self._leaser.daemon = True + self._leaser.start() + + def on_callback_request(self, callback_request): + """Map the callback request to the appropriate GRPC request.""" + action, kwargs = callback_request[0], callback_request[1] + getattr(self, action)(**kwargs) + + def on_exception(self, exception): + """Bubble the exception. + + This will cause the stream to exit loudly. + """ + # If this is DEADLINE_EXCEEDED, then we want to retry. + # That entails just returning None. + deadline_exceeded = grpc.StatusCode.DEADLINE_EXCEEDED + if getattr(exception, 'code', lambda: None)() == deadline_exceeded: + return + + # Raise any other exception. + raise exception + + def on_response(self, response): + """Process all received Pub/Sub messages. + + For each message, schedule a callback with the executor. 
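+
+        In this thread-based policy, scheduling a callback means submitting
+        it to the :class:`concurrent.futures.ThreadPoolExecutor` that was
+        created (or passed in) at construction time.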
+ """ + for msg in response.received_messages: + logger.debug('New message received from Pub/Sub: %r', msg) + logger.debug(self._callback) + message = Message(msg.message, msg.ack_id, self._request_queue) + future = self._executor.submit(self._callback, message) + logger.debug('Result: %s' % future.result()) diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py new file mode 100644 index 000000000000..a9de4a88f7f8 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -0,0 +1,70 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import collections +import sys + +import psutil + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.gax.utils.messages import get_messages +from google.protobuf import timestamp_pb2 + + +# Define the default values for batching. +# +# This class is used when creating a publisher or subscriber client, and +# these settings can be altered to tweak Pub/Sub behavior. +# The defaults should be fine for most use cases. +BatchSettings = collections.namedtuple( + 'BatchSettings', + ['max_bytes', 'max_latency', 'max_messages'], +) +BatchSettings.__new__.__defaults__ = ( + 1024 * 1024 * 5, # max_bytes: 5 MB + 0.05, # max_latency: 0.05 seconds + 1000, # max_messages: 1,000 +) + +# Define the type class and default values for flow control settings. +# +# This class is used when creating a publisher or subscriber client, and +# these settings can be altered to tweak Pub/Sub behavior. +# The defaults should be fine for most use cases. +FlowControl = collections.namedtuple( + 'FlowControl', + ['max_bytes', 'max_messages', 'resume_threshold'], +) +FlowControl.__new__.__defaults__ = ( + psutil.virtual_memory().total * 0.2, # max_bytes: 20% of total RAM + float('inf'), # max_messages: no limit + 0.8, # resume_threshold: 80% +) + + +# Pub/Sub uses timestamps from the common protobuf package. +# Do not make users import from there. +Timestamp = timestamp_pb2.Timestamp + + +_names = ['BatchSettings', 'FlowControl', 'Timestamp'] +for name, message in get_messages(pubsub_pb2).items(): + message.__module__ = 'google.cloud.pubsub_v1.types' + setattr(sys.modules[__name__], name, message) + _names.append(name) + + +__all__ = tuple(sorted(_names)) diff --git a/pubsub/nox.py b/pubsub/nox.py index 4bcecafe66b4..c860e0741fe6 100644 --- a/pubsub/nox.py +++ b/pubsub/nox.py @@ -38,10 +38,10 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. 
- session.run('py.test', '--quiet', - '--cov=google.cloud.pubsub', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', '--quiet', '--cov-append', '--cov-report=', + '--cov=google.cloud.pubsub', '--cov=google.cloud.pubsub_v1', + '--cov-config=.coveragerc', 'tests/unit', ) @@ -87,7 +87,8 @@ def lint(session): '--library-filesets', 'google', '--test-filesets', 'tests', # Temporarily allow this to fail. - success_codes=range(0, 100)) + success_codes=range(0, 100), + ) @nox.session diff --git a/pubsub/setup.py b/pubsub/setup.py index 71fee1dd7b8f..91bbeb8e2a8c 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-publisher@google.com', + 'author_email': 'googleapis-packages@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,9 +51,11 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.26.0, < 0.27dev', - 'grpcio >= 1.2.0, < 2.0dev', - 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', + 'google-gax >= 0.15.13, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', + 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', + 'grpcio >= 1.0.2, < 2.0dev', + 'psutil >= 5.2.2, < 6.0dev', ] setup( diff --git a/pubsub/tests/system.py b/pubsub/tests/system.py index eddfd1274da0..02666eae676a 100644 --- a/pubsub/tests/system.py +++ b/pubsub/tests/system.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. +# Copyright 2017, Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,395 +12,95 @@ # See the License for the specific language governing permissions and # limitations under the License. -import datetime -import os -import unittest +from __future__ import absolute_import -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from grpc import StatusCode -import requests +import time +import uuid -from google.cloud.environment_vars import PUBSUB_EMULATOR -from google.cloud.exceptions import Conflict -from google.cloud.pubsub import client +import mock +import six -from test_utils.retry import RetryInstanceState -from test_utils.retry import RetryResult -from test_utils.retry import RetryErrors -from test_utils.system import EmulatorCreds -from test_utils.system import unique_resource_id +from google import auth +from google.cloud import pubsub_v1 -def _unavailable(exc): - return exc_to_code(exc) == StatusCode.UNAVAILABLE +def _resource_name(resource_type): + """Return a randomly selected name for a resource. - -retry_unavailable = RetryErrors(GaxError, _unavailable) - - -class Config(object): - """Run-time configuration to be modified at set-up. - - This is a mutable stand-in to allow test set-up to modify - global state. - """ - CLIENT = None - IN_EMULATOR = False - - -def setUpModule(): - Config.IN_EMULATOR = os.getenv(PUBSUB_EMULATOR) is not None - if Config.IN_EMULATOR: - credentials = EmulatorCreds() - http = requests.Session() # Un-authorized. - Config.CLIENT = client.Client( - credentials=credentials, _http=http) - else: - Config.CLIENT = client.Client() - - -def _consume_topics(pubsub_client): - """Consume entire iterator. - - :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` - :param pubsub_client: Client to use to retrieve topics. 
- - :rtype: list - :returns: List of all topics encountered. - """ - return list(pubsub_client.list_topics()) - - -def _consume_snapshots(pubsub_client): - """Consume entire iterator. - - :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` - :param pubsub_client: Client to use to retrieve snapshots. - - :rtype: list - :returns: List of all snapshots encountered. - """ - return list(pubsub_client.list_snapshots()) - - -def _consume_subscriptions(topic): - """Consume entire iterator. - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: Topic to use to retrieve subscriptions. - - :rtype: list - :returns: List of all subscriptions encountered. + Args: + resource_type (str): The resource for which a name is being + generated. Should be singular (e.g. "topic", "subscription") """ - return list(topic.list_subscriptions()) - - -class TestPubsub(unittest.TestCase): - - def setUp(self): - self.to_delete = [] - - def tearDown(self): - for doomed in self.to_delete: - doomed.delete() - - def test_create_topic(self): - topic_name = 'a-new-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - self.assertTrue(topic.exists()) - self.assertEqual(topic.name, topic_name) - - with self.assertRaises(Conflict): - topic.create() - - def test_list_topics(self): - before = _consume_topics(Config.CLIENT) - topics_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), - ] - for topic_name in topics_to_create: - topic = Config.CLIENT.topic(topic_name) - topic.create() - self.to_delete.append(topic) - - # Retrieve the topics. - def _all_created(result): - return len(result) == len(before) + len(topics_to_create) - - retry = RetryResult(_all_created) - after = retry(_consume_topics)(Config.CLIENT) - - created = [topic for topic in after - if topic.name in topics_to_create and - topic.project == Config.CLIENT.project] - self.assertEqual(len(created), len(topics_to_create)) - - def test_create_subscription_defaults(self): - TOPIC_NAME = 'create-sub-def' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertIs(subscription.topic, topic) - - with self.assertRaises(Conflict): - subscription.create() - - def test_create_subscription_w_ack_deadline(self): - TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() - subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=120) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertEqual(subscription.ack_deadline, 120) - self.assertIs(subscription.topic, topic) - - def test_create_subscription_w_message_retention(self): - TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') - topic = 
Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() - duration = datetime.timedelta(hours=12) - subscription = topic.subscription( - SUBSCRIPTION_NAME, retain_acked_messages=True, - message_retention_duration=duration) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertTrue(subscription.retain_acked_messages) - self.assertEqual(subscription.message_retention_duration, duration) - self.assertIs(subscription.topic, topic) - - def test_list_subscriptions(self): - TOPIC_NAME = 'list-sub' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - topic.create() - self.to_delete.append(topic) - empty = _consume_subscriptions(topic) - self.assertEqual(len(empty), 0) - subscriptions_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), - ] - for subscription_name in subscriptions_to_create: - subscription = topic.subscription(subscription_name) - subscription.create() - self.to_delete.append(subscription) - - # Retrieve the subscriptions. - def _all_created(result): - return len(result) == len(subscriptions_to_create) - - retry = RetryResult(_all_created) - all_subscriptions = retry(_consume_subscriptions)(topic) - - created = [subscription for subscription in all_subscriptions - if subscription.name in subscriptions_to_create] - self.assertEqual(len(created), len(subscriptions_to_create)) - - def test_message_pull_mode_e2e(self): - import operator - TOPIC_NAME = 'message-e2e' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME, - timestamp_messages=True) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - - MESSAGE_1 = b'MESSAGE ONE' - MESSAGE_2 = b'MESSAGE ONE' - EXTRA_1 = 'EXTRA 1' - EXTRA_2 = 'EXTRA 2' - topic.publish(MESSAGE_1, extra=EXTRA_1) - topic.publish(MESSAGE_2, extra=EXTRA_2) - - class Hoover(object): - - def __init__(self): - self.received = [] - - def done(self, *dummy): - return len(self.received) == 2 - - def suction(self): - with subscription.auto_ack(max_messages=2) as ack: - self.received.extend(ack.values()) - - hoover = Hoover() - retry = RetryInstanceState(hoover.done) - retry(hoover.suction)() - - message1, message2 = sorted(hoover.received, - key=operator.attrgetter('timestamp')) - - self.assertEqual(message1.data, MESSAGE_1) - self.assertEqual(message1.attributes['extra'], EXTRA_1) - self.assertIsNotNone(message1.service_timestamp) - - self.assertEqual(message2.data, MESSAGE_2) - self.assertEqual(message2.attributes['extra'], EXTRA_2) - self.assertIsNotNone(message2.service_timestamp) - - def _maybe_emulator_skip(self): - # NOTE: This method is necessary because ``Config.IN_EMULATOR`` - # is set at runtime rather than import time, which means we - # can't use the @unittest.skipIf decorator. 
- if Config.IN_EMULATOR: - self.skipTest('IAM not supported by Pub/Sub emulator') - - def test_topic_iam_policy(self): - from google.cloud.pubsub.iam import PUBSUB_TOPICS_GET_IAM_POLICY - self._maybe_emulator_skip() - topic_name = 'test-topic-iam-policy-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - topic.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(topic.exists)() - self.to_delete.append(topic) - - if topic.check_iam_permissions([PUBSUB_TOPICS_GET_IAM_POLICY]): - policy = topic.get_iam_policy() - viewers = set(policy.viewers) - viewers.add(policy.user('jjg@google.com')) - policy.viewers = viewers - new_policy = topic.set_iam_policy(policy) - self.assertEqual(new_policy.viewers, policy.viewers) - - def test_subscription_iam_policy(self): - from google.cloud.pubsub.iam import PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY - self._maybe_emulator_skip() - topic_name = 'test-sub-iam-policy-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - topic.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(topic.exists)() - self.to_delete.append(topic) - - SUB_NAME = 'test-sub-iam-policy-sub' + unique_resource_id('-') - subscription = topic.subscription(SUB_NAME) - subscription.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(subscription.exists)() - self.to_delete.insert(0, subscription) - - if subscription.check_iam_permissions( - [PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY]): - policy = subscription.get_iam_policy() - viewers = set(policy.viewers) - viewers.add(policy.user('jjg@google.com')) - policy.viewers = viewers - new_policy = subscription.set_iam_policy(policy) - self.assertEqual(new_policy.viewers, policy.viewers) - - def test_create_snapshot(self): - TOPIC_NAME = 'create-snap-def' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - before_snapshots = _consume_snapshots(Config.CLIENT) - - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=600) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') - snapshot = subscription.snapshot(SNAPSHOT_NAME) - snapshot.create() - self.to_delete.append(snapshot) - - # There is no GET method for snapshot, so check existence using - # list - def retry_predicate(result): - return len(result) > len(before_snapshots) - - retry = RetryResult(retry_predicate, max_tries=5) - after_snapshots = retry(_consume_snapshots)(Config.CLIENT) - self.assertEqual(len(before_snapshots) + 1, len(after_snapshots)) - - def full_name(obj): - return obj.full_name - - self.assertIn(snapshot.full_name, map(full_name, after_snapshots)) - self.assertNotIn(snapshot.full_name, map(full_name, before_snapshots)) - - with self.assertRaises(Conflict): - snapshot.create() - - def test_seek(self): - TOPIC_NAME = 'seek-e2e' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME, - timestamp_messages=True) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - - SUBSCRIPTION_NAME = 'subscribing-to-seek' + unique_resource_id('-') - subscription = topic.subscription( - SUBSCRIPTION_NAME, 
retain_acked_messages=True) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - - SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') - snapshot = subscription.snapshot(SNAPSHOT_NAME) - snapshot.create() - self.to_delete.append(snapshot) - - MESSAGE_1 = b'MESSAGE ONE' - topic.publish(MESSAGE_1) - MESSAGE_2 = b'MESSAGE TWO' - topic.publish(MESSAGE_2) - - ((ack_id_1a, recvd_1a), ) = subscription.pull() - ((ack_id_2a, recvd_2a), ) = subscription.pull() - before_data = [obj.data for obj in (recvd_1a, recvd_2a)] - self.assertIn(MESSAGE_1, before_data) - self.assertIn(MESSAGE_2, before_data) - subscription.acknowledge((ack_id_1a, ack_id_2a)) - - self.assertFalse(subscription.pull(return_immediately=True)) - - subscription.seek_snapshot(snapshot) - - ((_, recvd_1b), ) = subscription.pull() - ((_, recvd_2b), ) = subscription.pull() - after_data = [obj.data for obj in (recvd_1b, recvd_2b)] - self.assertEqual(sorted(before_data), sorted(after_data)) + return 'projects/{project}/{resource_type}s/st-n{random}'.format( + project=auth.default()[1], + random=str(uuid.uuid4())[0:8], + resource_type=resource_type, + ) + + +def test_publish_messages(): + publisher = pubsub_v1.PublisherClient() + topic_name = _resource_name('topic') + futures = [] + + try: + publisher.create_topic(topic_name) + for i in range(0, 500): + futures.append( + publisher.publish( + topic_name, + b'The hail in Wales falls mainly on the snails.', + num=str(i), + ), + ) + for future in futures: + result = future.result() + assert isinstance(result, (six.text_type, six.binary_type)) + finally: + publisher.delete_topic(topic_name) + + +def test_subscribe_to_messages(): + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_name = _resource_name('topic') + sub_name = _resource_name('subscription') + + try: + # Create a topic. + publisher.create_topic(topic_name) + + # Subscribe to the topic. This must happen before the messages + # are published. + subscriber.create_subscription(sub_name, topic_name) + subscription = subscriber.subscribe(sub_name) + + # Publish some messages. + futures = [publisher.publish( + topic_name, + b'Wooooo! The claaaaaw!', + num=str(i), + ) for i in range(0, 50)] + + # Make sure the publish completes. + [f.result() for f in futures] + + # The callback should process the message numbers to prove + # that we got everything at least once. + callback = mock.Mock(wraps=lambda message: message.ack()) + + # Actually open the subscription and hold it open for a few seconds. + subscription.open(callback) + for second in range(0, 10): + time.sleep(1) + + # The callback should have fired at least fifty times, but it + # may take some time. + if callback.call_count >= 50: + return + + # Okay, we took too long; fail out. + assert callback.call_count >= 50 + finally: + publisher.delete_topic(topic_name) + subscriber.delete_subscription(sub_name) diff --git a/pubsub/tests/unit/__init__.py b/pubsub/tests/unit/__init__.py index 58e0d9153632..e69de29bb2d1 100644 --- a/pubsub/tests/unit/__init__.py +++ b/pubsub/tests/unit/__init__.py @@ -1,13 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py new file mode 100644 index 000000000000..05a749d58425 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -0,0 +1,69 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher.batch.thread import Batch + + +def create_batch(status=None, settings=types.BatchSettings()): + """Create a batch object, which does not commit. + + Args: + status (str): If provided, the batch's internal status will be set + to the provided status. + + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: The batch object + """ + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + batch = Batch(client, 'topic_name', settings, autocommit=False) + if status: + batch._status = status + return batch + + +def test_len(): + batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) + assert len(batch) == 0 + batch.publish(types.PubsubMessage(data=b'foo')) + assert len(batch) == 1 + + +def test_will_accept(): + batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) + message = types.PubsubMessage() + assert batch.will_accept(message) is True + + +def test_will_not_accept_status(): + batch = create_batch(status='talk to the hand') + message = types.PubsubMessage() + assert batch.will_accept(message) is False + + +def test_will_not_accept_size(): + batch = create_batch( + settings=types.BatchSettings(max_bytes=10), + status=BatchStatus.ACCEPTING_MESSAGES, + ) + message = types.PubsubMessage(data=b'abcdefghijklmnopqrstuvwxyz') + assert batch.will_accept(message) is False diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py new file mode 100644 index 000000000000..00b761f52b96 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -0,0 +1,204 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading +import time + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher.batch.thread import Batch + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return publisher.Client(credentials=creds) + + +def create_batch(autocommit=False, **batch_settings): + """Return a batch object suitable for testing. + + Args: + autocommit (bool): Whether the batch should commit after + ``max_latency`` seconds. By default, this is ``False`` + for unit testing. + kwargs (dict): Arguments passed on to the + :class:``~.pubsub_v1.types.BatchSettings`` constructor. + + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: A batch object. + """ + client = create_client() + settings = types.BatchSettings(**batch_settings) + return Batch(client, 'topic_name', settings, autocommit=autocommit) + + +def test_init(): + """Establish that a monitor thread is usually created on init.""" + client = create_client() + + # Do not actually create a thread, but do verify that one was created; + # it should be running the batch's "monitor" method (which commits the + # batch once time elapses). + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch = Batch(client, 'topic_name', types.BatchSettings()) + Thread.assert_called_once_with(target=batch.monitor) + + # New batches start able to accept messages by default. + assert batch.status == BatchStatus.ACCEPTING_MESSAGES + + +def test_init_infinite_latency(): + batch = create_batch(max_latency=float('inf')) + assert batch._thread is None + + +def test_client(): + client = create_client() + settings = types.BatchSettings() + batch = Batch(client, 'topic_name', settings, autocommit=False) + assert batch.client is client + + +def test_commit(): + batch = create_batch() + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch.commit() + + # A thread should have been created to do the actual commit. + Thread.assert_called_once_with(target=batch._commit) + Thread.return_value.start.assert_called_once_with() + + # The batch's status needs to be something other than "accepting messages", + # since the commit started. + assert batch.status != BatchStatus.ACCEPTING_MESSAGES + + +def test_blocking_commit(): + batch = create_batch() + futures = ( + batch.publish({'data': b'This is my message.'}), + batch.publish({'data': b'This is another message.'}), + ) + + # Set up the underlying API publish method to return a PublishResponse. + with mock.patch.object(type(batch.client.api), 'publish') as publish: + publish.return_value = types.PublishResponse(message_ids=['a', 'b']) + + # Actually commit the batch. + batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. 
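+        # Both queued messages should go out in a single request, in the
+        # order in which they were published.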
+ publish.assert_called_once_with('topic_name', [ + types.PubsubMessage(data=b'This is my message.'), + types.PubsubMessage(data=b'This is another message.'), + ]) + + # Establish that all of the futures are done, and that they have the + # expected values. + assert all([f.done() for f in futures]) + assert futures[0].result() == 'a' + assert futures[1].result() == 'b' + + +def test_blocking_commit_no_messages(): + batch = create_batch() + with mock.patch.object(type(batch.client.api), 'publish') as publish: + batch._commit() + assert publish.call_count == 0 + + +def test_blocking_commit_wrong_messageid_length(): + batch = create_batch() + futures = ( + batch.publish({'data': b'blah blah blah'}), + batch.publish({'data': b'blah blah blah blah'}), + ) + + # Set up a PublishResponse that only returns one message ID. + with mock.patch.object(type(batch.client.api), 'publish') as publish: + publish.return_value = types.PublishResponse(message_ids=['a']) + batch._commit() + for future in futures: + assert future.done() + assert isinstance(future.exception(), exceptions.PublishError) + + +def test_monitor(): + batch = create_batch(max_latency=5.0) + with mock.patch.object(time, 'sleep') as sleep: + with mock.patch.object(type(batch), '_commit') as _commit: + batch.monitor() + + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) + + # Since `monitor` runs in its own thread, it should call + # the blocking commit implementation. + _commit.assert_called_once_with() + + +def test_monitor_already_committed(): + batch = create_batch(max_latency=5.0) + batch._status = 'something else' + with mock.patch.object(time, 'sleep') as sleep: + batch.monitor() + + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) + + # The status should not have changed. + assert batch._status == 'something else' + + +def test_publish(): + batch = create_batch() + messages = ( + types.PubsubMessage(data=b'foobarbaz'), + types.PubsubMessage(data=b'spameggs'), + types.PubsubMessage(data=b'1335020400'), + ) + + # Publish each of the messages, which should save them to the batch. + for message in messages: + batch.publish(message) + + # There should be three messages on the batch, and three futures. + assert len(batch.messages) == 3 + assert len(batch._futures) == 3 + + # The size should have been incremented by the sum of the size of the + # messages. + assert batch.size == sum([m.ByteSize() for m in messages]) + assert batch.size > 0 # I do not always trust protobuf. + + +def test_publish_dict(): + batch = create_batch() + batch.publish({'data': b'foobarbaz', 'attributes': {'spam': 'eggs'}}) + + # There should be one message on the batch. + assert len(batch.messages) == 1 + + # It should be an actual protobuf Message at this point, with the + # expected values. + message = batch.messages[0] + assert isinstance(message, types.PubsubMessage) + assert message.data == b'foobarbaz' + assert message.attributes == {'spam': 'eggs'} diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_futures.py b/pubsub/tests/unit/pubsub_v1/publisher/test_futures.py new file mode 100644 index 000000000000..e9b64a202e94 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_futures.py @@ -0,0 +1,118 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +import pytest + +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.futures import Future + + +def test_cancel(): + assert Future().cancel() is False + + +def test_cancelled(): + assert Future().cancelled() is False + + +def test_running(): + assert Future().running() is True + + +def test_done(): + future = Future() + assert future.done() is False + future.set_result('12345') + assert future.done() is True + + +def test_exception_no_error(): + future = Future() + future.set_result('12345') + assert future.exception() is None + + +def test_exception_with_error(): + future = Future() + error = RuntimeError('Something really bad happened.') + future.set_exception(error) + + # Make sure that the exception that is returned is the batch's error. + # Also check the type to ensure the batch's error did not somehow + # change internally. + assert future.exception() is error + assert isinstance(future.exception(), RuntimeError) + with pytest.raises(RuntimeError): + future.result() + + +def test_exception_timeout(): + future = Future() + with pytest.raises(exceptions.TimeoutError): + future.exception(timeout=0.01) + + +def test_result_no_error(): + future = Future() + future.set_result('42') + assert future.result() == '42' + + +def test_result_with_error(): + future = Future() + future.set_exception(RuntimeError('Something really bad happened.')) + with pytest.raises(RuntimeError): + future.result() + + +def test_add_done_callback_pending_batch(): + future = Future() + callback = mock.Mock() + future.add_done_callback(callback) + assert len(future._callbacks) == 1 + assert callback in future._callbacks + assert callback.call_count == 0 + + +def test_add_done_callback_completed_batch(): + future = Future() + future.set_result('12345') + callback = mock.Mock(spec=()) + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +def test_trigger(): + future = Future() + callback = mock.Mock(spec=()) + future.add_done_callback(callback) + assert callback.call_count == 0 + future.set_result('12345') + callback.assert_called_once_with(future) + + +def test_set_result_once_only(): + future = Future() + future.set_result('12345') + with pytest.raises(RuntimeError): + future.set_result('67890') + + +def test_set_exception_once_only(): + future = Future() + future.set_exception(ValueError('wah wah')) + with pytest.raises(RuntimeError): + future.set_exception(TypeError('other wah wah')) diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py new file mode 100644 index 000000000000..0054b25262b5 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -0,0 +1,143 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.gapic.pubsub.v1 import publisher_client +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return publisher.Client(credentials=creds) + + +def test_init(): + client = create_client() + + # A plain client should have an `api` (the underlying GAPIC) and a + # batch settings object, which should have the defaults. + assert isinstance(client.api, publisher_client.PublisherClient) + assert client.batch_settings.max_bytes == 5 * (2 ** 20) + assert client.batch_settings.max_latency == 0.05 + assert client.batch_settings.max_messages == 1000 + + +def test_batch_accepting(): + """Establish that an existing batch is returned if it accepts messages.""" + client = create_client() + message = types.PubsubMessage(data=b'foo') + + # At first, there are no batches, so this should return a new batch + # which is also saved to the object. + ante = len(client._batches) + batch = client.batch('topic_name', message, autocommit=False) + assert len(client._batches) == ante + 1 + assert batch is client._batches['topic_name'] + + # A subsequent request should return the same batch. + batch2 = client.batch('topic_name', message, autocommit=False) + assert batch is batch2 + assert batch2 is client._batches['topic_name'] + + +def test_batch_without_autocreate(): + client = create_client() + message = types.PubsubMessage(data=b'foo') + + # If `create=False` is sent, then when the batch is not found, None + # is returned instead. + ante = len(client._batches) + batch = client.batch('topic_name', message, create=False) + assert batch is None + assert len(client._batches) == ante + + +def test_publish(): + client = create_client() + + # Use a mock in lieu of the actual batch class; set the mock up to claim + # indiscriminately that it accepts all messages. + batch = mock.Mock(spec=client._batch_class) + batch.will_accept.return_value = True + client._batches['topic_name'] = batch + + # Begin publishing. + client.publish('topic_name', b'spam') + client.publish('topic_name', b'foo', bar='baz') + + # The batch's publish method should have been called twice. + assert batch.publish.call_count == 2 + + # In both cases + # The first call should correspond to the first message. + _, args, _ = batch.publish.mock_calls[0] + assert args[0].data == b'spam' + assert not args[0].attributes + + # The second call should correspond to the second message. + _, args, _ = batch.publish.mock_calls[1] + assert args[0].data == b'foo' + assert args[0].attributes == {u'bar': u'baz'} + + +def test_publish_data_not_bytestring_error(): + client = create_client() + with pytest.raises(TypeError): + client.publish('topic_name', u'This is a text string.') + with pytest.raises(TypeError): + client.publish('topic_name', 42) + + +def test_publish_attrs_bytestring(): + client = create_client() + + # Use a mock in lieu of the actual batch class; set the mock up to claim + # indiscriminately that it accepts all messages. 
+ batch = mock.Mock(spec=client._batch_class) + batch.will_accept.return_value = True + client._batches['topic_name'] = batch + + # Begin publishing. + client.publish('topic_name', b'foo', bar=b'baz') + + # The attributes should have been sent as text. + _, args, _ = batch.publish.mock_calls[0] + assert args[0].data == b'foo' + assert args[0].attributes == {u'bar': u'baz'} + + +def test_publish_attrs_type_error(): + client = create_client() + with pytest.raises(TypeError): + client.publish('topic_name', b'foo', answer=42) + + +def test_gapic_instance_method(): + client = create_client() + with mock.patch.object(client.api, '_create_topic', autospec=True) as ct: + client.create_topic('projects/foo/topics/bar') + assert ct.call_count == 1 + _, args, _ = ct.mock_calls[0] + assert args[0] == types.Topic(name='projects/foo/topics/bar') + + +def test_gapic_class_method(): + client = create_client() + answer = client.topic_path('foo', 'bar') + assert answer == 'projects/foo/topics/bar' diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py new file mode 100644 index 000000000000..2a3429fbc5b3 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -0,0 +1,117 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import queue + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_consumer(): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + subscription = client.subscribe('sub_name_e') + return _consumer.Consumer(policy=subscription) + + +def test_send_request(): + consumer = create_consumer() + request = types.StreamingPullRequest(subscription='foo') + with mock.patch.object(queue.Queue, 'put') as put: + consumer.send_request(request) + put.assert_called_once_with(request) + + +def test_request_generator_thread(): + consumer = create_consumer() + generator = consumer._request_generator_thread() + + # The first request that comes from the request generator thread + # should always be the initial request. + initial_request = next(generator) + assert initial_request.subscription == 'sub_name_e' + assert initial_request.stream_ack_deadline_seconds == 10 + + # Subsequent requests correspond to items placed in the request queue. + consumer.send_request(types.StreamingPullRequest(ack_ids=['i'])) + request = next(generator) + assert request.ack_ids == ['i'] + + # The poison pill should stop the loop. 
+ consumer.send_request(_helper_threads.STOP) + with pytest.raises(StopIteration): + next(generator) + + +def test_blocking_consume(): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we run out of them. + with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: + consumer._blocking_consume() + assert on_res.call_count == 2 + assert on_res.mock_calls[0][1][1] == mock.sentinel.A + assert on_res.mock_calls[1][1][1] == mock.sentinel.B + + +def test_blocking_consume_keyboard_interrupt(): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we are sent the exiting event. + with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: + on_res.side_effect = KeyboardInterrupt + consumer._blocking_consume() + on_res.assert_called_once_with(consumer._policy, mock.sentinel.A) + + +@mock.patch.object(thread.Policy, 'call_rpc', autospec=True) +@mock.patch.object(thread.Policy, 'on_response', autospec=True) +@mock.patch.object(thread.Policy, 'on_exception', autospec=True) +def test_blocking_consume_exception_reraise(on_exc, on_res, call_rpc): + consumer = create_consumer() + + # Establish that we get responses until we are sent the exiting event. + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + on_res.side_effect = TypeError('Bad things!') + on_exc.side_effect = on_res.side_effect + with pytest.raises(TypeError): + consumer._blocking_consume() + + +def test_start_consuming(): + consumer = create_consumer() + helper_threads = consumer.helper_threads + with mock.patch.object(helper_threads, 'start', autospec=True) as start: + consumer.start_consuming() + assert consumer._exiting.is_set() is False + assert consumer.active is True + start.assert_called_once_with( + 'consume bidirectional stream', + consumer._request_queue, + consumer._blocking_consume, + ) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py new file mode 100644 index 000000000000..84775f0be2c1 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -0,0 +1,125 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import queue +import threading + +import mock + +from google.cloud.pubsub_v1.subscriber import _helper_threads + + +def test_start(): + registry = _helper_threads.HelperThreadRegistry() + queue_ = queue.Queue() + target = mock.Mock(spec=()) + with mock.patch.object(threading.Thread, 'start', autospec=True) as start: + registry.start('foo', queue_, target) + assert start.called + + +def test_stop_noop(): + registry = _helper_threads.HelperThreadRegistry() + assert len(registry._helper_threads) == 0 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + +def test_stop_dead_thread(): + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=None, + thread=threading.Thread(target=lambda: None), + ) + assert len(registry._helper_threads) == 1 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + +@mock.patch.object(queue.Queue, 'put') +@mock.patch.object(threading.Thread, 'is_alive') +@mock.patch.object(threading.Thread, 'join') +def test_stop_alive_thread(join, is_alive, put): + is_alive.return_value = True + + # Set up a registry with a helper thread in it. + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=queue.Queue(), + thread=threading.Thread(target=lambda: None), + ) + + # Assert that the helper thread is present, and removed correctly + # on stop. + assert len(registry._helper_threads) == 1 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + # Assert that all of our mocks were called in the expected manner. + is_alive.assert_called_once_with() + join.assert_called_once_with() + put.assert_called_once_with(_helper_threads.STOP) + + +def test_stop_all(): + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=None, + thread=threading.Thread(target=lambda: None), + ) + assert len(registry._helper_threads) == 1 + registry.stop_all() + assert len(registry._helper_threads) == 0 + + +def test_stop_all_noop(): + registry = _helper_threads.HelperThreadRegistry() + assert len(registry._helper_threads) == 0 + registry.stop_all() + assert len(registry._helper_threads) == 0 + + +def test_queue_callback_thread(): + queue_ = queue.Queue() + callback = mock.Mock(spec=()) + qct = _helper_threads.QueueCallbackThread(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = (mock.sentinel.A, _helper_threads.STOP) + qct() + + # Assert that we got the expected calls. + assert get.call_count == 2 + callback.assert_called_once_with(mock.sentinel.A) + + +def test_queue_callback_thread_exception(): + queue_ = queue.Queue() + callback = mock.Mock(spec=(), side_effect=(Exception,)) + qct = _helper_threads.QueueCallbackThread(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = (mock.sentinel.A, _helper_threads.STOP) + qct() + + # Assert that we got the expected calls. 
+ assert get.call_count == 2 + callback.assert_called_once_with(mock.sentinel.A) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py new file mode 100644 index 000000000000..23474a19d116 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py @@ -0,0 +1,84 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.pubsub_v1.subscriber import _histogram + + +def test_init(): + data = {} + histo = _histogram.Histogram(data=data) + assert histo._data is data + assert len(histo) == 0 + + +def test_contains(): + histo = _histogram.Histogram() + histo.add(10) + histo.add(20) + assert 10 in histo + assert 20 in histo + assert 30 not in histo + + +def test_max(): + histo = _histogram.Histogram() + assert histo.max == 600 + histo.add(120) + assert histo.max == 120 + histo.add(150) + assert histo.max == 150 + histo.add(20) + assert histo.max == 150 + + +def test_min(): + histo = _histogram.Histogram() + assert histo.min == 10 + histo.add(60) + assert histo.min == 60 + histo.add(30) + assert histo.min == 30 + histo.add(120) + assert histo.min == 30 + + +def test_add(): + histo = _histogram.Histogram() + histo.add(60) + assert histo._data[60] == 1 + histo.add(60) + assert histo._data[60] == 2 + + +def test_add_lower_limit(): + histo = _histogram.Histogram() + histo.add(5) + assert 5 not in histo + assert 10 in histo + + +def test_add_upper_limit(): + histo = _histogram.Histogram() + histo.add(12000) + assert 12000 not in histo + assert 600 in histo + + +def test_percentile(): + histo = _histogram.Histogram() + [histo.add(i) for i in range(101, 201)] + assert histo.percentile(100) == 200 + assert histo.percentile(101) == 200 + assert histo.percentile(99) == 199 + assert histo.percentile(1) == 101 diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py new file mode 100644 index 000000000000..a3a1e16f027e --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -0,0 +1,102 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import queue +import time + +import mock + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import message + + +def create_message(data, ack_id='ACKID', **attrs): + with mock.patch.object(message.Message, 'lease') as lease: + with mock.patch.object(time, 'time') as time_: + time_.return_value = 1335020400 + msg = message.Message(types.PubsubMessage( + attributes=attrs, + data=data, + message_id='message_id', + publish_time=types.Timestamp(seconds=1335020400 - 86400), + ), ack_id, queue.Queue()) + lease.assert_called_once_with() + return msg + + +def test_attributes(): + msg = create_message(b'foo', baz='bacon', spam='eggs') + assert msg.attributes == {'baz': 'bacon', 'spam': 'eggs'} + + +def test_data(): + msg = create_message(b'foo') + assert msg.data == b'foo' + + +def test_publish_time(): + msg = create_message(b'foo') + assert msg.publish_time == types.Timestamp(seconds=1335020400 - 86400) + + +def test_ack(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + with mock.patch.object(message.Message, 'drop') as drop: + msg.ack() + put.assert_called_once_with(('ack', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + 'time_to_ack': mock.ANY, + })) + + +def test_drop(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.drop() + put.assert_called_once_with(('drop', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + })) + + +def test_lease(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.lease() + put.assert_called_once_with(('lease', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + })) + + +def test_modify_ack_deadline(): + msg = create_message(b'foo', ack_id='bogus_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.modify_ack_deadline(60) + put.assert_called_once_with(('modify_ack_deadline', { + 'ack_id': 'bogus_id', + 'seconds': 60, + })) + + +def test_nack(): + msg = create_message(b'foo', ack_id='bogus_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.nack() + put.assert_called_once_with(('nack', { + 'ack_id': 'bogus_id', + 'byte_size': 25, + })) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py new file mode 100644 index 000000000000..df963424ccb9 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -0,0 +1,231 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(flow_control=types.FlowControl()): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + return thread.Policy(client, 'sub_name_d', flow_control=flow_control) + + +def test_ack_deadline(): + policy = create_policy() + assert policy.ack_deadline == 10 + policy.histogram.add(20) + assert policy.ack_deadline == 20 + policy.histogram.add(10) + assert policy.ack_deadline == 20 + + +def test_get_initial_request(): + policy = create_policy() + initial_request = policy.get_initial_request() + assert isinstance(initial_request, types.StreamingPullRequest) + assert initial_request.subscription == 'sub_name_d' + assert initial_request.stream_ack_deadline_seconds == 10 + + +def test_managed_ack_ids(): + policy = create_policy() + + # Ensure we always get a set back, even if the property is not yet set. + managed_ack_ids = policy.managed_ack_ids + assert isinstance(managed_ack_ids, set) + + # Ensure that multiple calls give the same actual object back. + assert managed_ack_ids is policy.managed_ack_ids + + +def test_subscription(): + policy = create_policy() + assert policy.subscription == 'sub_name_d' + + +def test_ack(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.ack('ack_id_string', 20) + send_request.assert_called_once_with(types.StreamingPullRequest( + ack_ids=['ack_id_string'], + )) + assert len(policy.histogram) == 1 + assert 20 in policy.histogram + + +def test_ack_no_time(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.ack('ack_id_string') + send_request.assert_called_once_with(types.StreamingPullRequest( + ack_ids=['ack_id_string'], + )) + assert len(policy.histogram) == 0 + + +def test_ack_paused(): + policy = create_policy() + policy._paused = True + policy._consumer.active = False + with mock.patch.object(policy, 'open') as open_: + policy.ack('ack_id_string') + open_.assert_called() + assert 'ack_id_string' in policy._ack_on_resume + + +def test_call_rpc(): + policy = create_policy() + with mock.patch.object(policy._client.api, 'streaming_pull') as pull: + policy.call_rpc(mock.sentinel.GENERATOR) + pull.assert_called_once_with(mock.sentinel.GENERATOR) + + +def test_drop(): + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy._bytes = 20 + policy.drop('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 + + # Do this again to establish idempotency. + policy.drop('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 + + +def test_drop_below_threshold(): + """Establish that we resume a paused subscription. + + If the subscription is paused, and we drop sufficiently below + the flow control thresholds, it should resume. 
+ """ + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy._bytes = 20 + policy._paused = True + with mock.patch.object(policy, 'open') as open_: + policy.drop(ack_id='ack_id_string', byte_size=20) + open_.assert_called_once_with(policy._callback) + assert policy._paused is False + + +def test_load(): + flow_control = types.FlowControl(max_messages=10, max_bytes=1000) + policy = create_policy(flow_control=flow_control) + + # This should mean that our messages count is at 10%, and our bytes + # are at 15%; the ._load property should return the higher (0.15). + policy.lease(ack_id='one', byte_size=150) + assert policy._load == 0.15 + + # After this message is added, the messages should be higher at 20% + # (versus 16% for bytes). + policy.lease(ack_id='two', byte_size=10) + assert policy._load == 0.2 + + # Returning a number above 100% is fine. + policy.lease(ack_id='three', byte_size=1000) + assert policy._load == 1.16 + + +def test_modify_ack_deadline(): + policy = create_policy() + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.modify_ack_deadline('ack_id_string', 60) + send_request.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['ack_id_string'], + modify_deadline_seconds=[60], + )) + + +def test_maintain_leases_inactive_consumer(): + policy = create_policy() + policy._consumer.active = False + assert policy.maintain_leases() is None + + +def test_maintain_leases_ack_ids(): + policy = create_policy() + policy._consumer.active = True + policy.lease('my ack id', 50) + + # Mock the sleep object. + with mock.patch.object(time, 'sleep', autospec=True) as sleep: + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer.active = False + sleep.side_effect = trigger_inactive + + # Also mock the consumer, which sends the request. + with mock.patch.object(policy._consumer, 'send_request') as send: + policy.maintain_leases() + send.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['my ack id'], + modify_deadline_seconds=[10], + )) + sleep.assert_called() + + +def test_maintain_leases_no_ack_ids(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(time, 'sleep', autospec=True) as sleep: + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer.active = False + sleep.side_effect = trigger_inactive + policy.maintain_leases() + sleep.assert_called() + + +def test_lease(): + policy = create_policy() + policy.lease(ack_id='ack_id_string', byte_size=20) + assert len(policy.managed_ack_ids) == 1 + assert policy._bytes == 20 + + # Do this again to prove idempotency. 
+ policy.lease(ack_id='ack_id_string', byte_size=20) + assert len(policy.managed_ack_ids) == 1 + assert policy._bytes == 20 + + +def test_lease_above_threshold(): + flow_control = types.FlowControl(max_messages=2) + policy = create_policy(flow_control=flow_control) + with mock.patch.object(policy, 'close') as close: + policy.lease(ack_id='first_ack_id', byte_size=20) + assert close.call_count == 0 + policy.lease(ack_id='second_ack_id', byte_size=25) + close.assert_called_once_with() + + +def test_nack(): + policy = create_policy() + with mock.patch.object(policy, 'modify_ack_deadline') as mad: + with mock.patch.object(policy, 'drop') as drop: + policy.nack(ack_id='ack_id_string', byte_size=10) + drop.assert_called_once_with(ack_id='ack_id_string', byte_size=10) + mad.assert_called_once_with(ack_id='ack_id_string', seconds=0) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py new file mode 100644 index 000000000000..76aec184815e --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -0,0 +1,120 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from concurrent import futures +import queue +import threading + +import grpc + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(**kwargs): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + return thread.Policy(client, 'sub_name_c', **kwargs) + + +def test_init(): + policy = create_policy() + policy._callback(None) + + +def test_init_with_executor(): + executor = futures.ThreadPoolExecutor(max_workers=25) + policy = create_policy(executor=executor, queue=queue.Queue()) + assert policy._executor is executor + + +def test_close(): + policy = create_policy() + consumer = policy._consumer + with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: + policy.close() + stop_consuming.assert_called_once_with() + assert 'callback request worker' not in policy._consumer.helper_threads + + +@mock.patch.object(_helper_threads.HelperThreadRegistry, 'start') +@mock.patch.object(threading.Thread, 'start') +def test_open(thread_start, htr_start): + policy = create_policy() + with mock.patch.object(policy._consumer, 'start_consuming') as consuming: + policy.open(mock.sentinel.CALLBACK) + assert policy._callback is mock.sentinel.CALLBACK + consuming.assert_called_once_with() + htr_start.assert_called() + thread_start.assert_called() + + +def test_on_callback_request(): + policy = create_policy() + with mock.patch.object(policy, 'call_rpc') as call_rpc: + policy.on_callback_request(('call_rpc', 
{'something': 42})) + call_rpc.assert_called_once_with(something=42) + + +def test_on_exception_deadline_exceeded(): + policy = create_policy() + exc = mock.Mock(spec=('code',)) + exc.code.return_value = grpc.StatusCode.DEADLINE_EXCEEDED + assert policy.on_exception(exc) is None + + +def test_on_exception_other(): + policy = create_policy() + exc = TypeError('wahhhhhh') + with pytest.raises(TypeError): + policy.on_exception(exc) + + +def test_on_response(): + callback = mock.Mock(spec=()) + + # Set up the policy. + policy = create_policy() + policy._callback = callback + + # Set up the messages to send. + messages = ( + types.PubsubMessage(data=b'foo', message_id='1'), + types.PubsubMessage(data=b'bar', message_id='2'), + ) + + # Set up a valid response. + response = types.StreamingPullResponse( + received_messages=[ + {'ack_id': 'fack', 'message': messages[0]}, + {'ack_id': 'back', 'message': messages[1]}, + ], + ) + + # Actually run the method and prove that the callback was + # called in the expected way. + policy.on_response(response) + assert callback.call_count == 2 + for call in callback.mock_calls: + assert isinstance(call[1][0], message.Message) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py new file mode 100644 index 000000000000..50e90fead181 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -0,0 +1,44 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return subscriber.Client(credentials=creds) + + +def test_init(): + client = create_client() + assert client._policy_class is thread.Policy + + +def test_subscribe(): + client = create_client() + subscription = client.subscribe('sub_name_a') + assert isinstance(subscription, thread.Policy) + + +def test_subscribe_with_callback(): + client = create_client() + callback = mock.Mock() + with mock.patch.object(thread.Policy, 'open') as open_: + subscription = client.subscribe('sub_name_b', callback) + open_.assert_called_once_with(callback) + assert isinstance(subscription, thread.Policy) diff --git a/pubsub/tests/unit/test__gax.py b/pubsub/tests/unit/test__gax.py deleted file mode 100644 index dd2ea8077f84..000000000000 --- a/pubsub/tests/unit/test__gax.py +++ /dev/null @@ -1,1661 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - -try: - # pylint: disable=unused-import - import google.cloud.pubsub._gax - # pylint: enable=unused-import -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False -else: - _HAVE_GRPC = True - -from google.cloud._testing import _GAXBaseAPI - - -def _make_credentials(): - # pylint: disable=redefined-outer-name - import google.auth.credentials - # pylint: enable=redefined-outer-name - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(object): - PROJECT = 'PROJECT' - PROJECT_PATH = 'projects/%s' % (PROJECT,) - LIST_TOPICS_PATH = '%s/topics' % (PROJECT_PATH,) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) - SUB_NAME = 'sub_name' - SUB_PATH = '%s/subscriptions/%s' % (TOPIC_PATH, SUB_NAME) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = '%s/snapshots/%s' % (PROJECT_PATH, SNAPSHOT_NAME) - TIME = 12345 - - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_PublisherAPI(_Base, unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._gax import _PublisherAPI - - return _PublisherAPI - - def test_ctor(self): - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_topics_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.topic import Topic - - TOKEN = 'TOKEN' - response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], - page_token=TOKEN) - gax_api = _GAXPublisherAPI(_list_topics_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, TOKEN) - - name, page_size, options = gax_api._list_topics_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_list_topics_with_paging(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - response = _GAXPageIterator( - [_TopicPB(self.TOPIC_PATH)], page_token=NEW_TOKEN) - gax_api = _GAXPublisherAPI(_list_topics_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - iterator = api.list_topics( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - 
self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, NEW_TOKEN) - - name, page_size, options = gax_api._list_topics_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_topic_create(self): - topic_pb = _TopicPB(self.TOPIC_PATH) - gax_api = _GAXPublisherAPI(_create_topic_response=topic_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_create(self.TOPIC_PATH) - - self.assertEqual(resource, {'name': self.TOPIC_PATH}) - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_failed_precondition(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXPublisherAPI(_create_topic_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_already_exists(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXPublisherAPI(_create_topic_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_hit(self): - topic_pb = _TopicPB(self.TOPIC_PATH) - gax_api = _GAXPublisherAPI(_get_topic_response=topic_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_get(self.TOPIC_PATH) - - self.assertEqual(resource, {'name': self.TOPIC_PATH}) - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_get(self.TOPIC_PATH) - - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_get(self.TOPIC_PATH) - - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_hit(self): - gax_api = _GAXPublisherAPI(_delete_topic_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.topic_delete(self.TOPIC_PATH) - - 
topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXPublisherAPI(_delete_topic_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_publish_hit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MSGID = 'DEADBEEF' - MESSAGE = {'data': B64, 'attributes': {}} - response = _PublishResponsePB([MSGID]) - gax_api = _GAXPublisherAPI(_publish_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(resource, [MSGID]) - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data.decode('ascii'), B64) - self.assertEqual(message_pb.attributes, {}) - self.assertEqual(options.is_bundling, False) - - def test_topic_publish_miss_w_attrs_w_bytes_payload(self): - import base64 - from google.cloud.exceptions import NotFound - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD) - MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} - timeout = 120 # 120 seconds or 2 minutes - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE], timeout=timeout) - - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data, B64) - self.assertEqual(message_pb.attributes, {'foo': 'bar'}) - self.assertEqual(options.is_bundling, False) - self.assertEqual(options.timeout, timeout) - - def test_topic_publish_error(self): - import base64 - from google.gax.errors import GaxError - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': B64, 'attributes': {}} - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data.decode('ascii'), B64) - self.assertEqual(message_pb.attributes, {}) - self.assertEqual(options.is_bundling, False) - - def test_topic_list_subscriptions_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.subscription import Subscription - from 
google.cloud.pubsub.topic import Topic - - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - response = _GAXPageIterator([local_sub_path]) - gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_topic_list_subscriptions_with_paging(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - response = _GAXPageIterator( - [local_sub_path], page_token=NEW_TOKEN) - gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions( - topic, page_size=SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(next_token, NEW_TOKEN) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - name, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(name, self.TOPIC_PATH) - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_topic_list_subscriptions_miss(self): - from google.gax import INITIAL_PAGE - from google.cloud.exceptions import NotFound - from google.cloud.pubsub.topic import Topic - - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - topic = Topic(self.TOPIC_NAME, client) - api.topic_list_subscriptions(topic) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_topic_list_subscriptions_error(self): - from google.gax import INITIAL_PAGE - from google.gax.errors import GaxError - from google.cloud.pubsub.topic import Topic - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - topic = Topic(self.TOPIC_NAME, client) - api.topic_list_subscriptions(topic) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - 
self.assertIs(options.page_token, INITIAL_PAGE) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_SubscriberAPI(_Base, unittest.TestCase): - - PUSH_ENDPOINT = 'https://api.example.com/push' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._gax import _SubscriberAPI - - return _SubscriberAPI - - def test_ctor(self): - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_subscriptions_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Subscription as SubscriptionPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - response = _GAXPageIterator([sub_pb]) - gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) - - name, page_size, options = gax_api._list_subscriptions_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_list_subscriptions_with_paging(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Subscription as SubscriptionPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) - gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - client = _Client(self.PROJECT) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_subscriptions( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_token = 
iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, NEW_TOKEN) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) - - name, page_size, options = gax_api._list_subscriptions_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 23) - self.assertEqual(options.page_token, TOKEN) - - def test_subscription_create(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertIsNone(push_config) - self.assertEqual(ack_deadline, None) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_optional_params(self): - import datetime - - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - expected_ack_deadline = 1729 - expected_push_endpoint = 'push-endpoint' - expected_retain_acked_messages = True - expected_message_retention_duration = datetime.timedelta( - days=1, hours=7, minutes=2, seconds=9) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, ack_deadline=expected_ack_deadline, - push_endpoint=expected_push_endpoint, - retain_acked_messages=expected_retain_acked_messages, - message_retention_duration=expected_message_retention_duration) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - print(gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, expected_push_endpoint) - self.assertEqual(ack_deadline, expected_ack_deadline) - self.assertEqual(retain_acked_messages, expected_retain_acked_messages) - self.assertEqual(message_retention_duration.seconds, - expected_message_retention_duration.total_seconds()) - self.assertIsNone(options) - - def test_subscription_create_failed_precondition(self): - from google.cloud.exceptions import Conflict - - DEADLINE = 600 - gax_api = _GAXSubscriberAPI( - 
_create_subscription_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) - self.assertEqual(ack_deadline, DEADLINE) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_already_exists(self): - from google.cloud.exceptions import Conflict - - DEADLINE = 600 - gax_api = _GAXSubscriberAPI(_create_subscription_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) - self.assertEqual(ack_deadline, DEADLINE) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertIsNone(push_config) - self.assertEqual(ack_deadline, None) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_get_hit(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - gax_api = _GAXSubscriberAPI(_get_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.subscription_get(self.SUB_PATH) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'pushConfig': { - 'pushEndpoint': self.PUSH_ENDPOINT, - }, - } - self.assertEqual(resource, expected) - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_get_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_get(self.SUB_PATH) - - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - 
- def test_subscription_get_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_get(self.SUB_PATH) - - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_delete_hit(self): - gax_api = _GAXSubscriberAPI(_delete_subscription_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_delete_subscription_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_modify_push_config_hit(self): - gax_api = _GAXSubscriberAPI(_modify_push_config_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_modify_push_config(self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_modify_push_config_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_modify_push_config( - self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_modify_push_config_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_modify_push_config( - self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_pull_explicit(self): - import base64 - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud._helpers import _datetime_to_rfc3339 - - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - NOW_PB = 
_datetime_to_pb_timestamp(NOW) - NOW_RFC3339 = _datetime_to_rfc3339(NOW) - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = { - 'messageId': MSG_ID, - 'data': B64, - 'attributes': {'a': 'b'}, - 'publishTime': NOW_RFC3339, - } - RECEIVED = [{'ackId': ACK_ID, 'message': MESSAGE}] - message_pb = _PubsubMessagePB(MSG_ID, B64, {'a': 'b'}, NOW_PB) - response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)]) - gax_api = _GAXSubscriberAPI(_pull_response=response_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - MAX_MESSAGES = 10 - - received = api.subscription_pull( - self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) - - self.assertEqual(received, RECEIVED) - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, MAX_MESSAGES) - self.assertTrue(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_defaults_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_pull(self.SUB_PATH) - - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, 1) - self.assertFalse(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_defaults_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_pull(self.SUB_PATH) - - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, 1) - self.assertFalse(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_deadline_exceeded(self): - client = _Client(self.PROJECT) - gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) - api = self._make_one(gax_api, client) - - result = api.subscription_pull(self.SUB_PATH) - self.assertEqual(result, []) - - def test_subscription_pull_deadline_exceeded_return_immediately(self): - from google.gax.errors import GaxError - - client = _Client(self.PROJECT) - gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_pull(self.SUB_PATH, return_immediately=True) - - def test_subscription_acknowledge_hit(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI(_acknowledge_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_acknowledge_miss(self): - from google.cloud.exceptions import NotFound - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - 
api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_acknowledge_error(self): - from google.gax.errors import GaxError - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_hit(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI(_modify_ack_deadline_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_miss(self): - from google.cloud.exceptions import NotFound - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_error(self): - from google.gax.errors import GaxError - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_list_snapshots_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Snapshot as SnapshotPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - from google.cloud.pubsub.topic import Topic - - local_snapshot_path = '%s/snapshots/%s' % ( - self.PROJECT_PATH, self.SNAPSHOT_NAME) - snapshot_pb = SnapshotPB( - name=local_snapshot_path, topic=self.TOPIC_PATH) - response = _GAXPageIterator([snapshot_pb]) - gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = 
api.list_snapshots(self.PROJECT) - snapshots = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the snapshot object returned. - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot.topic, Topic) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - - def test_list_snapshots_with_paging(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Snapshot as SnapshotPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - local_snapshot_path = '%s/snapshots/%s' % ( - self.PROJECT_PATH, self.SNAPSHOT_NAME) - snapshot_pb = SnapshotPB(name=local_snapshot_path, topic=self.TOPIC_PATH) - response = _GAXPageIterator([snapshot_pb], page_token=NEW_TOKEN) - gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) - client = _Client(self.PROJECT) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_snapshots( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - snapshots = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, NEW_TOKEN) - # Check the snapshot object returned. - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot.topic, Topic) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - - def test_subscription_seek_hit(self): - gax_api = _GAXSubscriberAPI(_seek_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_subscription_seek_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_seek_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_subscription_seek_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - 
gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_create(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Snapshot - - snapshot_pb = Snapshot(name=self.SNAPSHOT_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_snapshot_response=snapshot_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - expected = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_failed_precondition(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXSubscriberAPI(_create_snapshot_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_already_exists(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXSubscriberAPI(_create_snapshot_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_subscrption_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_snapshot_create_subscription_miss=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_hit(self): - gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=False) - client = 
_Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_publisher_api(_Base, unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.pubsub._gax import make_gax_publisher_api - - return make_gax_publisher_api(*args, **kwargs) - - def test_live_api(self): - from google.cloud.pubsub import __version__ - from google.cloud.pubsub._gax import DEFAULT_USER_AGENT - - channels = [] - publisher_api_kwargs = [] - channel_args = [] - channel_obj = object() - mock_result = object() - host = 'foo.apis.invalid' - - def mock_publisher_api(channel, **kwargs): - channels.append(channel) - publisher_api_kwargs.append(kwargs) - return mock_result - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - mock_publisher_api.SERVICE_ADDRESS = host - - creds = _make_credentials() - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - PublisherClient=mock_publisher_api, - make_secure_channel=make_channel) - with patch: - result = self._call_fut(creds) - - self.assertIs(result, mock_result) - self.assertEqual(len(publisher_api_kwargs), 1) - self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - def test_emulator(self): - from google.cloud.pubsub import __version__ - - channels = [] - publisher_api_kwargs = [] - mock_result = object() - insecure_args = [] - mock_channel = object() - - def mock_publisher_api(channel, **kwargs): - channels.append(channel) - publisher_api_kwargs.append(kwargs) - return mock_result - - def mock_insecure_channel(host): - insecure_args.append(host) - return mock_channel - - host = 'CURR_HOST:1234' - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - PublisherClient=mock_publisher_api, - insecure_channel=mock_insecure_channel) - with patch: - result = self._call_fut(host=host) - - self.assertIs(result, mock_result) - self.assertEqual(len(publisher_api_kwargs), 1) - self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [mock_channel]) - self.assertEqual(insecure_args, [host]) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_subscriber_api(_Base, unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.pubsub._gax import make_gax_subscriber_api - - return make_gax_subscriber_api(*args, **kwargs) - - def test_live_api(self): - from google.cloud.pubsub import __version__ - from google.cloud.pubsub._gax import DEFAULT_USER_AGENT - - channels = [] - subscriber_api_kwargs = [] - channel_args = [] - channel_obj = object() - mock_result = 
object() - host = 'foo.apis.invalid' - - def mock_subscriber_api(channel, **kwargs): - channels.append(channel) - subscriber_api_kwargs.append(kwargs) - return mock_result - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - mock_subscriber_api.SERVICE_ADDRESS = host - - creds = _make_credentials() - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - SubscriberClient=mock_subscriber_api, - make_secure_channel=make_channel) - with patch: - result = self._call_fut(creds) - - self.assertIs(result, mock_result) - self.assertEqual(len(subscriber_api_kwargs), 1) - self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - def test_emulator(self): - from google.cloud.pubsub import __version__ - - channels = [] - subscriber_api_kwargs = [] - mock_result = object() - insecure_args = [] - mock_channel = object() - - def mock_subscriber_api(channel, **kwargs): - channels.append(channel) - subscriber_api_kwargs.append(kwargs) - return mock_result - - def mock_insecure_channel(host): - insecure_args.append(host) - return mock_channel - - host = 'CURR_HOST:1234' - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - SubscriberClient=mock_subscriber_api, - insecure_channel=mock_insecure_channel) - with patch: - result = self._call_fut(host=host) - - self.assertIs(result, mock_result) - self.assertEqual(len(subscriber_api_kwargs), 1) - self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [mock_channel]) - self.assertEqual(insecure_args, [host]) - - -class _GAXPublisherAPI(_GAXBaseAPI): - - _create_topic_failed_precondition = False - _create_topic_already_exists = False - - def list_topics(self, name, page_size, options): - self._list_topics_called_with = name, page_size, options - return self._list_topics_response - - def create_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._create_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if self._create_topic_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if self._create_topic_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - return self._create_topic_response - - def get_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._get_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_topic_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def delete_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._delete_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if not self._delete_topic_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def publish(self, topic, messages, options=None): - from google.gax.errors import GaxError - - self._publish_called_with = topic, messages, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._publish_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def list_topic_subscriptions(self, topic, page_size, options=None): - from google.gax.errors import 
GaxError - - self._list_topic_subscriptions_called_with = topic, page_size, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._list_topic_subscriptions_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - -class _GAXSubscriberAPI(_GAXBaseAPI): - - _create_snapshot_already_exists = False - _create_snapshot_failed_precondition = False - _create_subscription_already_exists = False - _create_subscription_failed_precondition = False - _modify_push_config_ok = False - _acknowledge_ok = False - _modify_ack_deadline_ok = False - _deadline_exceeded_gax_error = False - _snapshot_create_subscription_miss=False - - def list_subscriptions(self, project, page_size, options=None): - self._list_subscriptions_called_with = (project, page_size, options) - return self._list_subscriptions_response - - def create_subscription(self, name, topic, push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - options=None): - from google.gax.errors import GaxError - - self._create_subscription_called_with = ( - name, topic, push_config, ack_deadline_seconds, - retain_acked_messages, message_retention_duration, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_subscription_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if self._create_subscription_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - return self._create_subscription_response - - def get_subscription(self, name, options=None): - from google.gax.errors import GaxError - - self._get_subscription_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_subscription_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def delete_subscription(self, name, options=None): - from google.gax.errors import GaxError - - self._delete_subscription_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if not self._delete_subscription_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def modify_push_config(self, name, push_config, options=None): - from google.gax.errors import GaxError - - self._modify_push_config_called_with = name, push_config, options - if self._random_gax_error: - raise GaxError('error') - if not self._modify_push_config_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def pull(self, name, max_messages, return_immediately, options=None): - from google.gax.errors import GaxError - - self._pull_called_with = ( - name, max_messages, return_immediately, options) - if self._random_gax_error: - raise GaxError('error') - if self._deadline_exceeded_gax_error: - raise GaxError('deadline exceeded', - self._make_grpc_deadline_exceeded()) - try: - return self._pull_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def acknowledge(self, name, ack_ids, options=None): - from google.gax.errors import GaxError - - self._acknowledge_called_with = name, ack_ids, options - if self._random_gax_error: - raise GaxError('error') - if not self._acknowledge_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def modify_ack_deadline(self, name, ack_ids, deadline, options=None): - from google.gax.errors import GaxError - - self._modify_ack_deadline_called_with = ( - name, ack_ids, deadline, options) - if self._random_gax_error: - raise GaxError('error') 
- if not self._modify_ack_deadline_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def list_snapshots(self, project, page_size, options=None): - self._list_snapshots_called_with = (project, page_size, options) - return self._list_snapshots_response - - def create_snapshot(self, name, subscription, options=None): - from google.gax.errors import GaxError - - self._create_snapshot_called_with = (name, subscription, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_snapshot_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - if self._create_snapshot_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if self._snapshot_create_subscription_miss: - raise GaxError('miss', self._make_grpc_not_found()) - - return self._create_snapshot_response - - def delete_snapshot(self, snapshot, options=None): - from google.gax.errors import GaxError - - self._delete_snapshot_called_with = (snapshot, options) - if self._random_gax_error: - raise GaxError('error') - if not self._delete_snapshot_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def seek(self, subscription, time=None, snapshot=None, options=None): - from google.gax.errors import GaxError - - self._seek_called_with = (subscription, time, snapshot, options) - if self._random_gax_error: - raise GaxError('error') - if not self._seek_ok: - raise GaxError('miss', self._make_grpc_not_found()) - -class _TopicPB(object): - - def __init__(self, name): - self.name = name - - -class _PublishResponsePB(object): - - def __init__(self, message_ids): - self.message_ids = message_ids - - -class _PubsubMessagePB(object): - - def __init__(self, message_id, data, attributes, publish_time): - self.message_id = message_id - self.data = data - self.attributes = attributes - self.publish_time = publish_time - - -class _ReceivedMessagePB(object): - - def __init__(self, ack_id, message): - self.ack_id = ack_id - self.message = message - - -class _PullResponsePB(object): - - def __init__(self, received_messages): - self.received_messages = received_messages - - -class _Client(object): - - def __init__(self, project): - self.project = project diff --git a/pubsub/tests/unit/test__helpers.py b/pubsub/tests/unit/test__helpers.py deleted file mode 100644 index 0503d68b20b9..000000000000 --- a/pubsub/tests/unit/test__helpers.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test_topic_name_from_path(unittest.TestCase): - - def _call_fut(self, path, project): - from google.cloud.pubsub._helpers import topic_name_from_path - - return topic_name_from_path(path, project) - - def test_w_simple_name(self): - TOPIC_NAME = 'TOPIC_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, TOPIC_NAME) - - def test_w_name_w_all_extras(self): - TOPIC_NAME = 'TOPIC_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, TOPIC_NAME) - - -class Test_subscription_name_from_path(unittest.TestCase): - - def _call_fut(self, path, project): - from google.cloud.pubsub._helpers import subscription_name_from_path - - return subscription_name_from_path(path, project) - - def test_w_simple_name(self): - SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - subscription_name = self._call_fut(PATH, PROJECT) - self.assertEqual(subscription_name, SUBSCRIPTION_NAME) - - def test_w_name_w_all_extras(self): - SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, SUBSCRIPTION_NAME) diff --git a/pubsub/tests/unit/test__http.py b/pubsub/tests/unit/test__http.py deleted file mode 100644 index 794fe093bbb3..000000000000 --- a/pubsub/tests/unit/test__http.py +++ /dev/null @@ -1,1165 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(unittest.TestCase): - PROJECT = 'PROJECT' - LIST_TOPICS_PATH = 'projects/%s/topics' % (PROJECT,) - LIST_SNAPSHOTS_PATH = 'projects/%s/snapshots' % (PROJECT,) - LIST_SUBSCRIPTIONS_PATH = 'projects/%s/subscriptions' % (PROJECT,) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -class TestConnection(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import Connection - - return Connection - - def test_default_url(self): - conn = self._make_one(object()) - klass = self._get_target_class() - self.assertEqual(conn.api_base_url, klass.API_BASE_URL) - - def test_custom_url_from_env(self): - from google.cloud.environment_vars import PUBSUB_EMULATOR - - HOST = 'localhost:8187' - fake_environ = {PUBSUB_EMULATOR: HOST} - - with mock.patch('os.environ', new=fake_environ): - conn = self._make_one(object()) - - klass = self._get_target_class() - self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) - self.assertEqual(conn.api_base_url, 'http://' + HOST) - - def test_build_api_url_no_extra_query_params(self): - conn = self._make_one(object()) - URI = '/'.join([ - conn.API_BASE_URL, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo'), URI) - - def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - - conn = self._make_one(object()) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - self.assertEqual(path, - '/'.join(['', conn.API_VERSION, 'foo'])) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') - - def test_build_api_url_w_base_url_override(self): - base_url1 = 'api-base-url1' - base_url2 = 'api-base-url2' - conn = self._make_one(object()) - conn.api_base_url = base_url1 - URI = '/'.join([ - base_url2, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo', api_base_url=base_url2), - URI) - - def test_extra_headers(self): - import requests - - from google.cloud import _http as base_http - from google.cloud.pubsub import _http as MUT - - http = mock.create_autospec(requests.Session, instance=True) - response = requests.Response() - response.status_code = 200 - data = b'brent-spiner' - response._content = data - http.request.return_value = response - client = mock.Mock(_http=http, spec=['_http']) - - conn = self._make_one(client) - req_data = 'req-data-boring' - result = conn.api_request( - 'GET', '/rainbow', data=req_data, expect_json=False) - self.assertEqual(result, data) - - expected_headers = { - 'Accept-Encoding': 'gzip', - base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, - 'User-Agent': conn.USER_AGENT, - } - expected_uri = conn.build_api_url('/rainbow') - http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method='GET', - url=expected_uri, - ) - - -class Test_PublisherAPI(_Base): - - 
@staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import _PublisherAPI - - return _PublisherAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_topics_no_paging(self): - from google.cloud.pubsub.topic import Topic - - returned = {'topics': [{'name': self.TOPIC_PATH}]} - connection = _Connection(returned) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_topics_with_paging(self): - import six - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - RETURNED = { - 'topics': [{'name': self.TOPIC_PATH}], - 'nextPageToken': 'TOKEN2', - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - topics = list(page) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, TOKEN2) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_list_topics_missing_key(self): - returned = {} - connection = _Connection(returned) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_create(self): - RETURNED = {'name': self.TOPIC_PATH} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_create(self.TOPIC_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_create_already_exists(self): - from google.cloud.exceptions import Conflict - - connection = _Connection() - connection._no_response_error = Conflict - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - 
with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_get_hit(self): - RETURNED = {'name': self.TOPIC_PATH} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_get(self.TOPIC_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_get_miss(self): - from google.cloud.exceptions import NotFound - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_get(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_delete_hit(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.topic_delete(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_delete_miss(self): - from google.cloud.exceptions import NotFound - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_delete(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_publish_hit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} - RETURNED = {'messageIds': [MSGID]} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(resource, [MSGID]) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:publish' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'messages': [B64MSG]}) - msg_data = connection._called_with['data']['messages'][0]['data'] - self.assertEqual(msg_data, B64_PAYLOAD) - - def test_topic_publish_twice(self): - import base64 - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - RETURNED = {'messageIds': []} - connection = _Connection(RETURNED, RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - messages = connection._called_with['data']['messages'] - self.assertEqual(len(messages), 1) - self.assertEqual(messages[0]['data'], B64_PAYLOAD) - - def test_topic_publish_miss(self): - import base64 - from google.cloud.exceptions import NotFound - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - 
MESSAGE = {'data': PAYLOAD, 'attributes': {}} - B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:publish' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'messages': [B64MSG]}) - - def test_topic_list_subscriptions_no_paging(self): - from google.cloud.pubsub.topic import Topic - from google.cloud.pubsub.subscription import Subscription - - local_sub_path = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, self.SUB_NAME) - RETURNED = {'subscriptions': [local_sub_path]} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - local_sub_path = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, self.SUB_NAME) - RETURNED = { - 'subscriptions': [local_sub_path], - 'nextPageToken': TOKEN2, - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions( - topic, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - subscriptions = list(page) - next_token = iterator.next_page_token - - self.assertEqual(next_token, TOKEN2) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_topic_list_subscriptions_missing_key(self): - from google.cloud.pubsub.topic import Topic - - connection = _Connection({}) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % 
(self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_list_subscriptions_miss(self): - from google.cloud.exceptions import NotFound - from google.cloud.pubsub.topic import Topic - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - topic = Topic(self.TOPIC_NAME, client) - list(api.topic_list_subscriptions(topic)) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - -class Test_SubscriberAPI(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import _SubscriberAPI - - return _SubscriberAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_subscriptions_no_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = {'subscriptions': [SUB_INFO]} - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': 'TOKEN2', - } - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_subscriptions( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - subscriptions = list(page) - next_token = iterator.next_page_token - - # Check the token returned. 
- self.assertEqual(next_token, TOKEN2) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_list_subscriptions_missing_key(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_subscription_create_defaults(self): - RESOURCE = {'topic': self.TOPIC_PATH} - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_create_retain_messages(self): - import datetime - - RESOURCE = {'topic': self.TOPIC_PATH, - 'retainAckedMessages': True, - 'messageRetentionDuration': { - 'seconds': 1729, - 'nanos': 2718 * 1000 - } - } - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, - retain_acked_messages=True, - message_retention_duration=datetime.timedelta( - seconds=1729, microseconds=2718)) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_create_explicit(self): - ACK_DEADLINE = 90 - PUSH_ENDPOINT = 'https://api.example.com/push' - RESOURCE = { - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': { - 'pushEndpoint': PUSH_ENDPOINT, - }, - } - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, - ack_deadline=ACK_DEADLINE, push_endpoint=PUSH_ENDPOINT) - - self.assertEqual(resource, RETURNED) - 
self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_get(self): - ACK_DEADLINE = 90 - PUSH_ENDPOINT = 'https://api.example.com/push' - RETURNED = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_get(self.SUB_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_subscription_delete(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_delete(self.SUB_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_subscription_modify_push_config(self): - PUSH_ENDPOINT = 'https://api.example.com/push' - BODY = { - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_modify_push_config(self.SUB_PATH, PUSH_ENDPOINT) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:modifyPushConfig' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_pull_defaults(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} - RETURNED = { - 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - BODY = { - 'returnImmediately': False, - 'maxMessages': 1, - } - - received = api.subscription_pull(self.SUB_PATH) - - self.assertEqual(received, RETURNED['receivedMessages']) - self.assertEqual(received[0]['message']['data'], PAYLOAD) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:pull' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_pull_explicit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} - RETURNED = { - 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - MAX_MESSAGES = 10 - BODY = { - 'returnImmediately': True, - 'maxMessages': MAX_MESSAGES, - } - - received = api.subscription_pull( - self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) - - self.assertEqual(received, RETURNED['receivedMessages']) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:pull' % (self.SUB_PATH,) 
- self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_acknowledge(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - BODY = { - 'ackIds': [ACK_ID1, ACK_ID2], - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:acknowledge' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_modify_ack_deadline(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - BODY = { - 'ackIds': [ACK_ID1, ACK_ID2], - 'ackDeadlineSeconds': NEW_DEADLINE, - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:modifyAckDeadline' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_list_snapshots_no_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - local_topic_path = 'projects/%s/topics/%s' % ( - self.PROJECT, self.TOPIC_NAME) - RETURNED = {'snapshots': [{ - 'name': local_snapshot_path, - 'topic': local_topic_path, - }], - } - - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_snapshots(self.PROJECT) - snapshots = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_snapshots_with_paging(self): - import six - - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - local_topic_path = 'projects/%s/topics/%s' % ( - self.PROJECT, self.TOPIC_NAME) - RETURNED = { - 'snapshots': [{ - 'name': local_snapshot_path, - 'topic': local_topic_path, - }], - 'nextPageToken': TOKEN2, - } - - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_snapshots( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - snapshots = list(page) - next_token = iterator.next_page_token - - self.assertEqual(next_token, TOKEN2) - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - 
self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_subscription_seek_snapshot(self): - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - RETURNED = {} - BODY = { - 'snapshot': local_snapshot_path - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_seek( - self.SUB_PATH, snapshot=local_snapshot_path) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:seek' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_seek_time(self): - time = '12345' - RETURNED = {} - BODY = { - 'time': time - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_seek(self.SUB_PATH, time=time) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:seek' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_create(self): - RETURNED = { - 'name': self.SNAPSHOT_PATH, - 'subscription': self.SUB_PATH - } - BODY = { - 'subscription': self.SUB_PATH - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_create_already_exists(self): - from google.cloud.exceptions import NotFound - - BODY = { - 'subscription': self.SUB_PATH - } - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_delete(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.snapshot_delete(self.SNAPSHOT_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - - -class Test_IAMPolicyAPI(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import _IAMPolicyAPI - - return _IAMPolicyAPI - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, None) - api = self._make_one(client) - self.assertEqual(api.api_request, connection.api_request) - - def test_get_iam_policy(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from 
google.cloud.pubsub.iam import VIEWER_ROLE - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - RETURNED = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - policy = api.get_iam_policy(self.TOPIC_PATH) - - self.assertEqual(policy, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s:getIamPolicy' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_set_iam_policy(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } - RETURNED = POLICY.copy() - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - policy = api.set_iam_policy(self.TOPIC_PATH, POLICY) - - self.assertEqual(policy, RETURNED) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:setIamPolicy' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'policy': POLICY}) - - def test_test_iam_permissions(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = ALL_ROLES[1:] - RETURNED = {'permissions': ALLOWED} - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) - - self.assertEqual(allowed, ALLOWED) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'permissions': ALL_ROLES}) - - def test_test_iam_permissions_missing_key(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) - - self.assertEqual(allowed, []) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'permissions': ALL_ROLES}) - - 
-class Test__transform_messages_base64_empty(unittest.TestCase):
-    def _call_fut(self, messages, transform, key=None):
-        from google.cloud.pubsub._http import _transform_messages_base64
-
-        return _transform_messages_base64(messages, transform, key)
-
-    def test__transform_messages_base64_empty_message(self):
-        from base64 import b64decode
-
-        DATA = [{'message': {}}]
-        self._call_fut(DATA, b64decode, 'message')
-        self.assertEqual(DATA, [{'message': {}}])
-
-    def test__transform_messages_base64_empty_data(self):
-        from base64 import b64decode
-
-        DATA = [{'message': {'data': b''}}]
-        self._call_fut(DATA, b64decode, 'message')
-        self.assertEqual(DATA, [{'message': {'data': b''}}])
-
-    def test__transform_messages_base64_pull(self):
-        from base64 import b64encode
-
-        DATA = [{'message': {'data': b'testing 1 2 3'}}]
-        self._call_fut(DATA, b64encode, 'message')
-        self.assertEqual(DATA[0]['message']['data'],
-                         b64encode(b'testing 1 2 3'))
-
-    def test__transform_messages_base64_publish(self):
-        from base64 import b64encode
-
-        DATA = [{'data': b'testing 1 2 3'}]
-        self._call_fut(DATA, b64encode)
-        self.assertEqual(DATA[0]['data'], b64encode(b'testing 1 2 3'))
-
-
-class _Connection(object):
-
-    _called_with = None
-    _no_response_error = None
-
-    def __init__(self, *responses):
-        self._responses = responses
-
-    def api_request(self, **kw):
-        from google.cloud.exceptions import NotFound
-
-        self._called_with = kw
-        try:
-            response, self._responses = self._responses[0], self._responses[1:]
-        except IndexError:
-            err_class = self._no_response_error or NotFound
-            raise err_class('miss')
-        return response
-
-
-class _Client(object):
-
-    def __init__(self, connection, project):
-        self._connection = connection
-        self.project = project
diff --git a/pubsub/tests/unit/test_client.py b/pubsub/tests/unit/test_client.py
deleted file mode 100644
index 407683606330..000000000000
--- a/pubsub/tests/unit/test_client.py
+++ /dev/null
@@ -1,462 +0,0 @@
-# Copyright 2015 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -import datetime -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestClient(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.client import Client - - return Client - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_publisher_api_wo_gax(self): - from google.cloud.pubsub._http import _PublisherAPI - - creds = _make_credentials() - - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=False) - - conn = client._connection = _Connection() - api = client.publisher_api - - self.assertIsInstance(api, _PublisherAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.publisher_api - self.assertIs(again, api) - - def test_no_gax_ctor(self): - from google.cloud.pubsub._http import _PublisherAPI - - creds = _make_credentials() - with mock.patch('google.cloud.pubsub.client._USE_GRPC', - new=True): - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - - self.assertFalse(client._use_grpc) - api = client.publisher_api - self.assertIsInstance(api, _PublisherAPI) - - def _publisher_api_w_gax_helper(self, emulator=False): - from google.cloud.pubsub import _http - - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxPublisherAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self._client = client - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=True) - client._connection.in_emulator = emulator - - patch = mock.patch.multiple( - 'google.cloud.pubsub.client', - make_gax_publisher_api=_generated_api, - GAXPublisherAPI=_GaxPublisherAPI) - with patch: - api = client.publisher_api - - self.assertIsInstance(api, _GaxPublisherAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api._client, client) - # API instance is cached - again = client.publisher_api - self.assertIs(again, api) - if emulator: - kwargs = {'host': _http.Connection.API_BASE_URL} - else: - kwargs = {'credentials': creds} - self.assertEqual(_called_with, [((), kwargs)]) - - def test_publisher_api_w_gax(self): - self._publisher_api_w_gax_helper() - - def test_publisher_api_w_gax_and_emulator(self): - self._publisher_api_w_gax_helper(emulator=True) - - def test_subscriber_api_wo_gax(self): - from google.cloud.pubsub._http import _SubscriberAPI - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=False) - - conn = client._connection = _Connection() - api = client.subscriber_api - - self.assertIsInstance(api, _SubscriberAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.subscriber_api - self.assertIs(again, api) - - def _subscriber_api_w_gax_helper(self, emulator=False): - from google.cloud.pubsub import _http - - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxSubscriberAPI(object): - - def __init__(self, _wrapped, client): - 
self._wrapped = _wrapped - self._client = client - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=True) - client._connection.in_emulator = emulator - - patch = mock.patch.multiple( - 'google.cloud.pubsub.client', - make_gax_subscriber_api=_generated_api, - GAXSubscriberAPI=_GaxSubscriberAPI) - with patch: - api = client.subscriber_api - - self.assertIsInstance(api, _GaxSubscriberAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api._client, client) - # API instance is cached - again = client.subscriber_api - self.assertIs(again, api) - if emulator: - kwargs = {'host': _http.Connection.API_BASE_URL} - else: - kwargs = {'credentials': creds} - self.assertEqual(_called_with, [((), kwargs)]) - - def test_subscriber_api_w_gax(self): - self._subscriber_api_w_gax_helper() - - def test_subscriber_api_w_gax_and_emulator(self): - self._subscriber_api_w_gax_helper(emulator=True) - - def test_iam_policy_api(self): - from google.cloud.pubsub._http import _IAMPolicyAPI - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = _Connection() - - api = client.iam_policy_api - self.assertIsInstance(api, _IAMPolicyAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.iam_policy_api - self.assertIs(again, api) - - def test_list_topics_no_paging(self): - from google.cloud.pubsub.topic import Topic - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) - client._publisher_api = api - - iterator = client.list_topics() - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - self.assertIsInstance(topics[0], Topic) - self.assertEqual(topics[0].name, self.TOPIC_NAME) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) - - def test_list_topics_with_paging(self): - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, client)], TOKEN2) - client._publisher_api = api - - iterator = client.list_topics(SIZE, TOKEN1) - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - self.assertIsInstance(topics[0], Topic) - self.assertEqual(topics[0].name, self.TOPIC_NAME) - self.assertEqual(next_page_token, TOKEN2) - - self.assertEqual(api._listed_topics, (self.PROJECT, 1, TOKEN1)) - - def test_list_topics_missing_key(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI() - client._publisher_api = api - - iterator = client.list_topics() - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 0) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) - - def test_list_subscriptions_no_paging(self): - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _make_credentials() - client = 
self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - returned = {'subscriptions': [SUB_INFO]} - client._connection = _Connection(returned) - - iterator = client.list_subscriptions() - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_page_token) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - called_with = client._connection._called_with - expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': expected_path, - 'query_params': {}, - }) - - def test_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - - # Set up the mock response. - ACK_DEADLINE = 42 - PUSH_ENDPOINT = 'https://push.example.com/endpoint' - SUB_INFO = {'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}} - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - returned = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': TOKEN2, - } - client._connection = _Connection(returned) - - iterator = client.list_subscriptions( - SIZE, TOKEN1) - page = six.next(iterator.pages) - subscriptions = list(page) - next_page_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_page_token, TOKEN2) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, ACK_DEADLINE) - self.assertEqual(subscription.push_endpoint, PUSH_ENDPOINT) - - called_with = client._connection._called_with - expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': expected_path, - 'query_params': { - 'pageSize': SIZE, - 'pageToken': TOKEN1, - }, - }) - - def test_list_subscriptions_w_missing_key(self): - PROJECT = 'PROJECT' - creds = _make_credentials() - - client = self._make_one(project=PROJECT, credentials=creds) - client._connection = object() - api = client._subscriber_api = _FauxSubscriberAPI() - api._list_subscriptions_response = (), None - - subscriptions, next_page_token = client.list_subscriptions() - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_subscriptions, - (self.PROJECT, None, None)) - - def test_list_snapshots(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxSubscriberAPI() - response = api._list_snapshots_response = object() - client._subscriber_api = api - self.assertEqual(client.list_snapshots(), response) - self.assertEqual(api._listed_snapshots, (self.PROJECT, None, None)) - - def test_topic_factory(self): - PROJECT = 'PROJECT' - TOPIC_NAME = 'TOPIC_NAME' - creds = _make_credentials() - - client_obj = self._make_one(project=PROJECT, credentials=creds) - new_topic = client_obj.topic(TOPIC_NAME) - self.assertEqual(new_topic.name, TOPIC_NAME) - self.assertIs(new_topic._client, client_obj) - self.assertEqual(new_topic.project, PROJECT) - self.assertEqual(new_topic.full_name, - 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)) - self.assertFalse(new_topic.timestamp_messages) - - def test_subscription_factory(self): - project = 'PROJECT' - creds = _make_credentials() - client_obj = self._make_one(project=project, credentials=creds) - - sub_name = 'hoot-n-holler' - ack_deadline = 60, - push_endpoint = 'https://api.example.com/push' - message_retention_duration = datetime.timedelta(3600) - new_subscription = client_obj.subscription( - sub_name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=True, - message_retention_duration=message_retention_duration) - - self.assertEqual(new_subscription.name, sub_name) - self.assertIsNone(new_subscription.topic) - self.assertIs(new_subscription._client, client_obj) - self.assertEqual(new_subscription.project, project) - self.assertEqual(new_subscription.ack_deadline, ack_deadline) - self.assertEqual(new_subscription.push_endpoint, push_endpoint) - self.assertTrue(new_subscription.retain_acked_messages) - self.assertEqual( - new_subscription.message_retention_duration, - message_retention_duration) - - -class _Iterator(object): - - def __init__(self, items, token): - self._items = items or () - self.next_page_token = token - - def __iter__(self): - return iter(self._items) - - -class _FauxPublisherAPI(object): - - def __init__(self, items=None, token=None): - self._items = items - self._token = token - - def 
list_topics(self, project, page_size, page_token): - self._listed_topics = (project, page_size, page_token) - return _Iterator(self._items, self._token) - - -class _FauxSubscriberAPI(object): - - def list_subscriptions(self, project, page_size, page_token): - self._listed_subscriptions = (project, page_size, page_token) - return self._list_subscriptions_response - - def list_snapshots(self, project, page_size, page_token): - self._listed_snapshots = (project, page_size, page_token) - return self._list_snapshots_response - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response diff --git a/pubsub/tests/unit/test_iam.py b/pubsub/tests/unit/test_iam.py deleted file mode 100644 index 475d375d0cd8..000000000000 --- a/pubsub/tests/unit/test_iam.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class TestPolicy(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.iam import Policy - - return Policy - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - empty = frozenset() - policy = self._make_one() - self.assertIsNone(policy.etag) - self.assertIsNone(policy.version) - self.assertEqual(policy.owners, empty) - self.assertEqual(policy.editors, empty) - self.assertEqual(policy.viewers, empty) - self.assertEqual(policy.publishers, empty) - self.assertEqual(policy.subscribers, empty) - - def test_ctor_explicit(self): - VERSION = 17 - ETAG = 'ETAG' - empty = frozenset() - policy = self._make_one(ETAG, VERSION) - self.assertEqual(policy.etag, ETAG) - self.assertEqual(policy.version, VERSION) - self.assertEqual(policy.owners, empty) - self.assertEqual(policy.editors, empty) - self.assertEqual(policy.viewers, empty) - self.assertEqual(policy.publishers, empty) - self.assertEqual(policy.subscribers, empty) - - def test_publishers_setter(self): - import warnings - from google.cloud.pubsub.iam import ( - PUBSUB_PUBLISHER_ROLE, - ) - PUBLISHER = 'user:phred@example.com' - expected = set([PUBLISHER]) - policy = self._make_one() - with warnings.catch_warnings(): - policy.publishers = [PUBLISHER] - - self.assertEqual(policy.publishers, frozenset(expected)) - self.assertEqual( - dict(policy), {PUBSUB_PUBLISHER_ROLE: expected}) - - def test_subscribers_setter(self): - import warnings - from google.cloud.pubsub.iam import ( - PUBSUB_SUBSCRIBER_ROLE, - ) - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - expected = set([SUBSCRIBER]) - policy = self._make_one() - with warnings.catch_warnings(): - policy.subscribers = [SUBSCRIBER] - - self.assertEqual(policy.subscribers, frozenset(expected)) - self.assertEqual( - dict(policy), {PUBSUB_SUBSCRIBER_ROLE: expected}) diff --git a/pubsub/tests/unit/test_message.py 
b/pubsub/tests/unit/test_message.py deleted file mode 100644 index b4f6abfbb1b2..000000000000 --- a/pubsub/tests/unit/test_message.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class TestMessage(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.message import Message - - return Message - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - message = self._make_one(data=DATA, message_id=MESSAGE_ID) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertIsNone(message.service_timestamp) - - def test_ctor_w_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - ATTRS = {'a': 'b'} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, ATTRS) - self.assertIsNone(message.service_timestamp) - - def test_timestamp_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - message = self._make_one(data=DATA, message_id=MESSAGE_ID) - - def _to_fail(): - return message.timestamp - - self.assertRaises(ValueError, _to_fail) - - def test_timestamp_wo_timestamp_in_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - ATTRS = {'a': 'b'} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - - def _to_fail(): - return message.timestamp - - self.assertRaises(ValueError, _to_fail) - - def test_timestamp_w_timestamp_in_attributes(self): - from datetime import datetime - from google.cloud._helpers import _RFC3339_MICROS - from google.cloud._helpers import UTC - - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - TIMESTAMP = '2015-04-10T18:42:27.131956Z' - naive = datetime.strptime(TIMESTAMP, _RFC3339_MICROS) - timestamp = naive.replace(tzinfo=UTC) - ATTRS = {'timestamp': TIMESTAMP} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - self.assertEqual(message.timestamp, timestamp) - - def test_from_api_repr_missing_data(self): - MESSAGE_ID = '12345' - api_repr = {'messageId': MESSAGE_ID} - message = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(message.data, b'') - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertIsNone(message.service_timestamp) - - def test_from_api_repr_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = '12345' - TIMESTAMP = '2016-03-18-19:38:22.001393427Z' - api_repr = { - 'data': DATA, - 'messageId': MESSAGE_ID, - 'publishTime': TIMESTAMP, - } - message = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - 
self.assertEqual(message.service_timestamp, TIMESTAMP)
-
-    def test_from_api_repr_w_attributes(self):
-        DATA = b'DEADBEEF'
-        MESSAGE_ID = '12345'
-        ATTRS = {'a': 'b'}
-        TIMESTAMP = '2016-03-18-19:38:22.001393427Z'
-        api_repr = {
-            'data': DATA,
-            'messageId': MESSAGE_ID,
-            'publishTime': TIMESTAMP,
-            'attributes': ATTRS,
-        }
-        message = self._get_target_class().from_api_repr(api_repr)
-        self.assertEqual(message.data, DATA)
-        self.assertEqual(message.message_id, MESSAGE_ID)
-        self.assertEqual(message.service_timestamp, TIMESTAMP)
-        self.assertEqual(message.attributes, ATTRS)
diff --git a/pubsub/tests/unit/test_pubsub.py b/pubsub/tests/unit/test_pubsub.py
new file mode 100644
index 000000000000..605dbddd7601
--- /dev/null
+++ b/pubsub/tests/unit/test_pubsub.py
@@ -0,0 +1,22 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud import pubsub
+from google.cloud import pubsub_v1
+
+
+def test_exported_things():
+    assert pubsub.PublisherClient is pubsub_v1.PublisherClient
+    assert pubsub.SubscriberClient is pubsub_v1.SubscriberClient
+    assert pubsub.types is pubsub_v1.types
diff --git a/pubsub/tests/unit/test_snpashot.py b/pubsub/tests/unit/test_snpashot.py
deleted file mode 100644
index 5834a1fedd89..000000000000
--- a/pubsub/tests/unit/test_snpashot.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# Copyright 2017 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -import unittest - -import mock - - -class TestSnapshot(unittest.TestCase): - PROJECT = 'PROJECT' - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.snapshot import Snapshot - - return Snapshot - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - client = _Client(project=self.PROJECT) - snapshot = self._make_one(self.SNAPSHOT_NAME, - client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) - - def test_ctor_w_subscription(self): - client = _Client(project=self.PROJECT) - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, - subscription=subscription) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) - - def test_ctor_error(self): - client = _Client(project=self.PROJECT) - subscription = _Subscription(name=self.SUB_NAME, client=client) - with self.assertRaises(TypeError): - snapshot = self._make_one(self.SNAPSHOT_NAME, - client=client, - subscription=subscription) - - def test_from_api_repr_no_topics(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - klass = self._get_target_class() - snapshot = klass.from_api_repr(resource, client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertIsInstance(snapshot.topic, Topic) - - def test_from_api_repr_w_deleted_topic(self): - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': klass._DELETED_TOPIC_PATH - } - snapshot = klass.from_api_repr(resource, client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertIsNone(snapshot.topic) - - def test_from_api_repr_w_topics_w_no_topic_match(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - topics = {} - snapshot = klass.from_api_repr(resource, client=client, topics=topics) - topic = snapshot.topic - self.assertIsInstance(topic, Topic) - self.assertIs(topic, topics[self.TOPIC_PATH]) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - - def test_from_api_repr_w_topics_w_topic_match(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': 
self.TOPIC_PATH - } - topic = _Topic(self.TOPIC_NAME, client=client) - topics = {self.TOPIC_PATH: topic} - snapshot = klass.from_api_repr(resource, client=client, topics=topics) - self.assertIs(snapshot.topic, topic) - - def test_create_w_bound_client_error(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_response = api._snapshot_create_response = object() - snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) - - with self.assertRaises(RuntimeError): - snapshot.create() - - def test_create_w_bound_subscription(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.create() - - self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) - - def test_create_w_bound_subscription_w_alternate_client(self): - client = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.create(client=client2) - - self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) - - snapshot.delete() - - self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) - - def test_delete_w_alternate_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.delete() - - self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - def topic(self, name): - from google.cloud.pubsub.topic import Topic - - return Topic(name, client=self) - - -class _Topic(object): - - def __init__(self, name, client): - self._client = client - - -class _Subscription(object): - - def __init__(self, name, client=None): - self._client = client - self.full_name = 'projects/%s/subscriptions/%s' % ( - client.project, name, ) - - -class _FauxSubscriberAPI(object): - - def snapshot_create(self, snapshot_path, subscription_path): - self._snapshot_created = (snapshot_path, subscription_path, ) - - def snapshot_delete(self, snapshot_path): - self._snapshot_deleted = (snapshot_path, ) - - diff --git a/pubsub/tests/unit/test_subscription.py b/pubsub/tests/unit/test_subscription.py deleted file mode 100644 index ddf0ea439d77..000000000000 --- a/pubsub/tests/unit/test_subscription.py +++ /dev/null @@ -1,957 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -class TestSubscription(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'sub_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - DEADLINE = 42 - ENDPOINT = 'https://api.example.com/push' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.subscription import Subscription - - return Subscription - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - def test_ctor_explicit(self): - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_ctor_w_client_wo_topic(self): - client = _Client(project=self.PROJECT) - subscription = self._make_one(self.SUB_NAME, client=client) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsNone(subscription.topic) - - def test_ctor_w_both_topic_and_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client1) - with self.assertRaises(TypeError): - self._make_one(self.SUB_NAME, topic, client=client2) - - def test_ctor_w_neither_topic_nor_client(self): - with self.assertRaises(TypeError): - self._make_one(self.SUB_NAME) - - def test_from_api_repr_no_topics(self): - from google.cloud.pubsub.topic import Topic - - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client) - self.assertEqual(subscription.name, self.SUB_NAME) - topic = subscription.topic - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_deleted_topic(self): - klass = self._get_target_class() - resource = {'topic': klass._DELETED_TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - 
subscription = klass.from_api_repr(resource, client) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsNone(subscription.topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_topics_no_topic_match(self): - from google.cloud.pubsub.topic import Topic - - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - topics = {} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client, topics=topics) - self.assertEqual(subscription.name, self.SUB_NAME) - topic = subscription.topic - self.assertIsInstance(topic, Topic) - self.assertIs(topic, topics[self.TOPIC_PATH]) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_topics_w_topic_match(self): - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - topics = {self.TOPIC_PATH: topic} - klass = self._get_target_class() - subscription = klass.from_api_repr(resource, client, topics=topics) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_full_name_and_path(self): - PROJECT = 'PROJECT' - SUB_FULL = 'projects/%s/subscriptions/%s' % (PROJECT, self.SUB_NAME) - SUB_PATH = '/%s' % (SUB_FULL,) - TOPIC_NAME = 'topic_name' - CLIENT = _Client(project=PROJECT) - topic = _Topic(TOPIC_NAME, client=CLIENT) - subscription = self._make_one(self.SUB_NAME, topic) - self.assertEqual(subscription.full_name, SUB_FULL) - self.assertEqual(subscription.path, SUB_PATH) - - def test_autoack_defaults(self): - from google.cloud.pubsub.subscription import AutoAck - - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - auto_ack = subscription.auto_ack() - self.assertIsInstance(auto_ack, AutoAck) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, False) - self.assertEqual(auto_ack._max_messages, 1) - self.assertIsNone(auto_ack._client) - - def test_autoack_explicit(self): - from google.cloud.pubsub.subscription import AutoAck - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - auto_ack = subscription.auto_ack(True, 10, client2) - self.assertIsInstance(auto_ack, AutoAck) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, True) - self.assertEqual(auto_ack._max_messages, 10) - self.assertIs(auto_ack._client, client2) - - def test_create_pull_wo_ack_deadline_w_bound_client(self): - RESPONSE = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_create_response = RESPONSE - topic = 
_Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.create() - - self.assertEqual( - api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, None, None, None, None)) - - def test_create_push_w_ack_deadline_w_alternate_client(self): - RESPONSE = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT} - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_create_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.create(client=client2) - - self.assertEqual( - api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, self.DEADLINE, self.ENDPOINT, - None, None)) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - self.assertFalse(subscription.exists()) - - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_exists_hit_w_alternate_client(self): - RESPONSE = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - self.assertTrue(subscription.exists(client=client2)) - - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_reload_w_bound_client(self): - RESPONSE = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.reload() - - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_reload_sets_topic(self): - from google.cloud.pubsub.topic import Topic - - response = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = response - subscription = self._make_one(self.SUB_NAME, client=client) - - self.assertIsNone(subscription.topic) - subscription.reload() - - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_got, self.SUB_PATH) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - - def test_reload_w_alternate_client(self): - RESPONSE = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - 
api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.reload(client=client2) - - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_delete_w_bound_client(self): - RESPONSE = {} - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_delete_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.delete() - - self.assertEqual(api._subscription_deleted, self.SUB_PATH) - - def test_delete_w_alternate_client(self): - RESPONSE = {} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_delete_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.delete(client=client2) - - self.assertEqual(api._subscription_deleted, self.SUB_PATH) - - def test_modify_push_config_w_endpoint_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_push_config_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_push_configuration(push_endpoint=self.ENDPOINT) - - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_modified_push_config, - (self.SUB_PATH, self.ENDPOINT)) - - def test_modify_push_config_wo_endpoint_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_push_config_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - push_endpoint=self.ENDPOINT) - - subscription.modify_push_configuration(push_endpoint=None, - client=client2) - - self.assertIsNone(subscription.push_endpoint) - self.assertEqual(api._subscription_modified_push_config, - (self.SUB_PATH, None)) - - def test_pull_wo_return_immediately_max_messages_w_bound_client(self): - from google.cloud.pubsub.message import Message - - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - PAYLOAD = b'This is the message text' - MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD} - REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = [REC_MESSAGE] - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull() - - self.assertEqual(len(pulled), 1) - ack_id, message = pulled[0] - self.assertEqual(ack_id, ACK_ID) - self.assertIsInstance(message, Message) - self.assertEqual(message.data, PAYLOAD) - self.assertEqual(message.message_id, MSG_ID) - self.assertEqual(message.attributes, {}) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, False, 1)) - - def test_pull_w_return_immediately_w_max_messages_w_alt_client(self): - from google.cloud.pubsub.message import Message - - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - PAYLOAD = b'This is 
the message text' - MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD, - 'attributes': {'a': 'b'}} - REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = [REC_MESSAGE] - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull(return_immediately=True, max_messages=3, - client=client2) - - self.assertEqual(len(pulled), 1) - ack_id, message = pulled[0] - self.assertEqual(ack_id, ACK_ID) - self.assertIsInstance(message, Message) - self.assertEqual(message.data, PAYLOAD) - self.assertEqual(message.message_id, MSG_ID) - self.assertEqual(message.attributes, {'a': 'b'}) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, True, 3)) - - def test_pull_wo_receivedMessages(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull(return_immediately=False) - - self.assertEqual(len(pulled), 0) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, False, 1)) - - def test_acknowledge_w_bound_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_acknowlege_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.acknowledge([ACK_ID1, ACK_ID2]) - - self.assertEqual(api._subscription_acked, - (self.SUB_PATH, [ACK_ID1, ACK_ID2])) - - def test_acknowledge_w_alternate_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_acknowlege_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.acknowledge([ACK_ID1, ACK_ID2], client=client2) - - self.assertEqual(api._subscription_acked, - (self.SUB_PATH, [ACK_ID1, ACK_ID2])) - - def test_modify_ack_deadline_w_bound_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_ack_deadline_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_ack_deadline([ACK_ID1, ACK_ID2], self.DEADLINE) - - self.assertEqual(api._subscription_modified_ack_deadline, - (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) - - def test_modify_ack_deadline_w_alternate_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_ack_deadline_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_ack_deadline( - [ACK_ID1, ACK_ID2], self.DEADLINE, client=client2) - - self.assertEqual(api._subscription_modified_ack_deadline, - (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) - - def test_snapshot(self): - from 
google.cloud.pubsub.snapshot import Snapshot - - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - snapshot = subscription.snapshot(self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot.topic, topic) - - def test_seek_snapshot_w_bound_client(self): - from google.cloud.pubsub.snapshot import Snapshot - - client = _Client(project=self.PROJECT) - snapshot = Snapshot - snapshot = Snapshot(self.SNAPSHOT_NAME, client=client) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_snapshot(snapshot) - - self.assertEqual(api._subscription_seeked, - (self.SUB_PATH, None, self.SNAPSHOT_PATH)) - - def test_seek_snapshot_w_alternate_client(self): - from google.cloud.pubsub.snapshot import Snapshot - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - snapshot = Snapshot(self.SNAPSHOT_NAME, client=client1) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_snapshot(snapshot, client=client2) - - self.assertEqual(api._subscription_seeked, - (self.SUB_PATH, None, self.SNAPSHOT_PATH)) - - def test_seek_time_w_bound_client(self): - import datetime - - from google.cloud import _helpers - - time = datetime.time() - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_timestamp(time) - - self.assertEqual( - api._subscription_seeked, - (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), None)) - - def test_seek_time_w_alternate_client(self): - import datetime - - from google.cloud import _helpers - - time = datetime.time() - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_timestamp(time, client=client2) - - self.assertEqual( - api._subscription_seeked, - (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), None)) - - def test_get_iam_policy_w_bound_client(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': 
[SUBSCRIBER]}, - ], - } - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = subscription.get_iam_policy() - - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._got_iam_policy, self.SUB_PATH) - - def test_get_iam_policy_w_alternate_client(self): - POLICY = { - 'etag': 'ACAB', - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = subscription.get_iam_policy(client=client2) - - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.editors), []) - self.assertEqual(sorted(policy.viewers), []) - - self.assertEqual(api._got_iam_policy, self.SUB_PATH) - - def test_set_iam_policy_w_bound_client(self): - import operator - from google.cloud.pubsub.iam import Policy - from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - RESPONSE = POLICY.copy() - RESPONSE['etag'] = 'ABACABAF' - RESPONSE['version'] = 18 - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - policy = Policy('DEADBEEF', 17) - policy.owners = [OWNER1, OWNER2] - policy.editors = [EDITOR1, EDITOR2] - policy.viewers = [VIEWER1, VIEWER2] - policy.publishers = [PUBLISHER] - policy.subscribers = [SUBSCRIBER] - - new_policy = subscription.set_iam_policy(policy) - - self.assertEqual(new_policy.etag, 'ABACABAF') - self.assertEqual(new_policy.version, 18) - self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(len(api._set_iam_policy), 2) - self.assertEqual(api._set_iam_policy[0], self.SUB_PATH) - resource = 
api._set_iam_policy[1] - self.assertEqual(resource['etag'], POLICY['etag']) - self.assertEqual(resource['version'], POLICY['version']) - key = operator.itemgetter('role') - self.assertEqual( - sorted(resource['bindings'], key=key), - sorted(POLICY['bindings'], key=key)) - - def test_set_iam_policy_w_alternate_client(self): - from google.cloud.pubsub.iam import Policy - - RESPONSE = {'etag': 'ACAB'} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = Policy() - new_policy = subscription.set_iam_policy(policy, client=client2) - - self.assertEqual(new_policy.etag, 'ACAB') - self.assertIsNone(new_policy.version) - self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.editors), []) - self.assertEqual(sorted(new_policy.viewers), []) - self.assertEqual(api._set_iam_policy, (self.SUB_PATH, {})) - - def test_check_iam_permissions_w_bound_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = ROLES[:-1] - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - allowed = subscription.check_iam_permissions(ROLES) - - self.assertEqual(allowed, ROLES[:-1]) - self.assertEqual(api._tested_iam_permissions, - (self.SUB_PATH, ROLES)) - - def test_check_iam_permissions_w_alternate_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = [] - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - allowed = subscription.check_iam_permissions(ROLES, client=client2) - - self.assertEqual(len(allowed), 0) - self.assertEqual(api._tested_iam_permissions, - (self.SUB_PATH, ROLES)) - - -class _FauxSubscribererAPI(object): - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - self._subscription_created = ( - subscription_path, topic_path, ack_deadline, push_endpoint, - retain_acked_messages, message_retention_duration) - return self._subscription_create_response - - def subscription_get(self, subscription_path): - from google.cloud.exceptions import NotFound - - self._subscription_got = subscription_path - try: - return self._subscription_get_response - except AttributeError: - raise NotFound(subscription_path) - - def subscription_delete(self, subscription_path): - self._subscription_deleted = subscription_path - return self._subscription_delete_response - - def subscription_modify_push_config( - self, subscription_path, push_endpoint): - self._subscription_modified_push_config = ( - subscription_path, push_endpoint) - return self._subscription_modify_push_config_response - - def subscription_pull(self, subscription_path, 
return_immediately, - max_messages): - self._subscription_pulled = ( - subscription_path, return_immediately, max_messages) - return self._subscription_pull_response - - def subscription_acknowledge(self, subscription_path, ack_ids): - self._subscription_acked = (subscription_path, ack_ids) - return self._subscription_acknowlege_response - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - self._subscription_modified_ack_deadline = ( - subscription_path, ack_ids, ack_deadline) - return self._subscription_modify_ack_deadline_response - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - self._subscription_seeked = ( - subscription_path, time, snapshot) - return self._subscription_seek_response - - -class TestAutoAck(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.subscription import AutoAck - - return AutoAck - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - subscription = _FauxSubscription(()) - auto_ack = self._make_one(subscription) - self.assertEqual(auto_ack._return_immediately, False) - self.assertEqual(auto_ack._max_messages, 1) - self.assertIsNone(auto_ack._client) - - def test_ctor_explicit(self): - CLIENT = object() - subscription = _FauxSubscription(()) - auto_ack = self._make_one( - subscription, return_immediately=True, max_messages=10, - client=CLIENT) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, True) - self.assertEqual(auto_ack._max_messages, 10) - self.assertIs(auto_ack._client, CLIENT) - - def test___enter___w_defaults(self): - subscription = _FauxSubscription(()) - auto_ack = self._make_one(subscription) - - with auto_ack as returned: - pass - - self.assertIs(returned, auto_ack) - self.assertEqual(subscription._return_immediately, False) - self.assertEqual(subscription._max_messages, 1) - self.assertIsNone(subscription._client) - - def test___enter___w_explicit(self): - CLIENT = object() - subscription = _FauxSubscription(()) - auto_ack = self._make_one( - subscription, return_immediately=True, max_messages=10, - client=CLIENT) - - with auto_ack as returned: - pass - - self.assertIs(returned, auto_ack) - self.assertEqual(subscription._return_immediately, True) - self.assertEqual(subscription._max_messages, 10) - self.assertIs(subscription._client, CLIENT) - - def test___exit___(self): - CLIENT = object() - ACK_ID1, MESSAGE1 = 'ACK_ID1', _FallibleMessage() - ACK_ID2, MESSAGE2 = 'ACK_ID2', _FallibleMessage() - ACK_ID3, MESSAGE3 = 'ACK_ID3', _FallibleMessage(True) - ITEMS = [ - (ACK_ID1, MESSAGE1), - (ACK_ID2, MESSAGE2), - (ACK_ID3, MESSAGE3), - ] - subscription = _FauxSubscription(ITEMS) - auto_ack = self._make_one(subscription, client=CLIENT) - with auto_ack: - for ack_id, message in list(auto_ack.items()): - if message.fail: - del auto_ack[ack_id] - self.assertEqual(sorted(subscription._acknowledged), - [ACK_ID1, ACK_ID2]) - self.assertIs(subscription._ack_client, CLIENT) - - def test_empty_ack_no_acknowledge(self): - subscription = mock.Mock(_FauxSubscription) - subscription.pull = lambda *args: [] - - auto_ack = self._make_one(subscription) - with auto_ack: - pass - - subscription.acknowledge.assert_not_called() - - -class _FauxIAMPolicy(object): - - def get_iam_policy(self, target_path): - self._got_iam_policy = target_path - return self._get_iam_policy_response - - def set_iam_policy(self, target_path, policy): - 
self._set_iam_policy = target_path, policy - return self._set_iam_policy_response - - def test_iam_permissions(self, target_path, permissions): - self._tested_iam_permissions = target_path, permissions - return self._test_iam_permissions_response - - -class _Topic(object): - - def __init__(self, name, client): - self.name = name - self._client = client - self.project = client.project - self.full_name = 'projects/%s/topics/%s' % (client.project, name) - self.path = '/projects/%s/topics/%s' % (client.project, name) - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - def topic(self, name, timestamp_messages=False): - from google.cloud.pubsub.topic import Topic - - return Topic(name, client=self, timestamp_messages=timestamp_messages) - - -class _FallibleMessage(object): - - def __init__(self, fail=False): - self.fail = fail - - -class _FauxSubscription(object): - - def __init__(self, items): - self._items = items - self._mapping = dict(items) - self._acknowledged = set() - - def pull(self, return_immediately=False, max_messages=1, client=None): - self._return_immediately = return_immediately - self._max_messages = max_messages - self._client = client - return self._items - - def acknowledge(self, ack_ids, client=None): - self._ack_client = client - for ack_id in ack_ids: - message = self._mapping[ack_id] - assert not message.fail - self._acknowledged.add(ack_id) diff --git a/pubsub/tests/unit/test_topic.py b/pubsub/tests/unit/test_topic.py deleted file mode 100644 index 2c90432195c2..000000000000 --- a/pubsub/tests/unit/test_topic.py +++ /dev/null @@ -1,974 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestTopic(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.topic import Topic - - return Topic - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_w_explicit_timestamp(self): - client = _Client(project=self.PROJECT) - topic = self._make_one(self.TOPIC_NAME, - client=client, - timestamp_messages=True) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertTrue(topic.timestamp_messages) - - def test_from_api_repr(self): - client = _Client(project=self.PROJECT) - resource = {'name': self.TOPIC_PATH} - klass = self._get_target_class() - topic = klass.from_api_repr(resource, client=client) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertIs(topic._client, client) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - - def test_from_api_repr_with_bad_client(self): - PROJECT1 = 'PROJECT1' - PROJECT2 = 'PROJECT2' - client = _Client(project=PROJECT1) - PATH = 'projects/%s/topics/%s' % (PROJECT2, self.TOPIC_NAME) - resource = {'name': PATH} - klass = self._get_target_class() - self.assertRaises(ValueError, klass.from_api_repr, - resource, client=client) - - def test_create_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client) - - topic.create() - - self.assertEqual(api._topic_created, self.TOPIC_PATH) - - def test_create_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - topic.create(client=client2) - - self.assertEqual(api._topic_created, self.TOPIC_PATH) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - topic = self._make_one(self.TOPIC_NAME, client=client) - - self.assertFalse(topic.exists()) - - self.assertEqual(api._topic_got, self.TOPIC_PATH) - - def test_exists_hit_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_get_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - self.assertTrue(topic.exists(client=client2)) - - self.assertEqual(api._topic_got, self.TOPIC_PATH) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_delete_response = {} - topic = self._make_one(self.TOPIC_NAME, client=client) - - topic.delete() - - self.assertEqual(api._topic_deleted, self.TOPIC_PATH) - - def test_delete_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_delete_response = {} - topic = 
self._make_one(self.TOPIC_NAME, client=client1) - - topic.delete(client=client2) - - self.assertEqual(api._topic_deleted, self.TOPIC_PATH) - - def test_publish_single_bytes_wo_attrs_w_bound_client(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): - import datetime - from google.cloud._helpers import _RFC3339_MICROS - - NOW = datetime.datetime.utcnow() - - def _utcnow(): - return NOW - - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = { - 'data': PAYLOAD, - 'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)}, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - - topic = self._make_one(self.TOPIC_NAME, client=client1, - timestamp_messages=True) - with mock.patch('google.cloud.pubsub.topic._NOW', new=_utcnow): - msgid = topic.publish(PAYLOAD, client=client2) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - OVERRIDE = '2015-04-10T16:46:22.868399Z' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'timestamp': OVERRIDE}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client, - timestamp_messages=True) - - msgid = topic.publish(PAYLOAD, timestamp=OVERRIDE) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_w_attrs(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - - msgid = topic.publish(PAYLOAD, attr1='value1', attr2='value2') - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_with_gax(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_without_gax(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) 
- self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_multiple_w_bound_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = self._make_one(self.TOPIC_NAME, client=client) - - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) - - def test_publish_w_no_messages(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [] - topic = self._make_one(self.TOPIC_NAME, client=client) - - with topic.batch() as batch: - pass - - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._api_called, 0) - - def test_publish_multiple_w_alternate_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = { - 'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = self._make_one(self.TOPIC_NAME, client=client1) - - with topic.batch(client=client2) as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) - - def test_publish_multiple_error(self): - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - topic = self._make_one(self.TOPIC_NAME, client=client) - - try: - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - raise _Bugout() - except _Bugout: - pass - - self.assertEqual(list(batch), []) - self.assertEqual(getattr(api, '_topic_published', self), self) - - def test_subscription(self): - from google.cloud.pubsub.subscription import Subscription - - client = _Client(project=self.PROJECT) - topic = self._make_one(self.TOPIC_NAME, client=client) - - SUBSCRIPTION_NAME = 'subscription_name' - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertIs(subscription.topic, topic) - - def test_list_subscriptions_no_paging(self): - import six - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - - SUB_NAME_1 = 'subscription_1' - SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_1) - SUB_NAME_2 = 'subscription_2' - 
SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_2) - SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] - TOKEN = 'TOKEN' - - returned = { - 'subscriptions': SUBS_LIST, - 'nextPageToken': TOKEN, - } - client._connection = _Connection(returned) - - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions() - page = six.next(iterator.pages) - subscriptions = list(page) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 2) - - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[0].name, SUB_NAME_1) - self.assertIs(subscription.topic, topic) - - subscription = subscriptions[1] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[1].name, SUB_NAME_2) - self.assertIs(subscription.topic, topic) - - self.assertEqual(next_page_token, TOKEN) - # Verify the mock. - called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], {}) - - def test_list_subscriptions_with_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - - SUB_NAME_1 = 'subscription_1' - SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_1) - SUB_NAME_2 = 'subscription_2' - SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_2) - SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] - PAGE_SIZE = 10 - TOKEN = 'TOKEN' - - returned = { - 'subscriptions': SUBS_LIST, - } - client._connection = _Connection(returned) - - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions( - page_size=PAGE_SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 2) - - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[0].name, SUB_NAME_1) - self.assertIs(subscription.topic, topic) - - subscription = subscriptions[1] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[1].name, SUB_NAME_2) - self.assertIs(subscription.topic, topic) - - self.assertIsNone(next_page_token) - # Verify the mock. - called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], - {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) - - def test_list_subscriptions_missing_key(self): - from google.cloud.pubsub.client import Client - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - client._connection = _Connection({}) - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions() - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_page_token) - # Verify the mock. 
- called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], {}) - - def test_get_iam_policy_w_bound_client(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = self._make_one(self.TOPIC_NAME, client=client) - - policy = topic.get_iam_policy() - - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) - - def test_get_iam_policy_w_alternate_client(self): - POLICY = { - 'etag': 'ACAB', - } - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = self._make_one(self.TOPIC_NAME, client=client1) - - policy = topic.get_iam_policy(client=client2) - - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.editors), []) - self.assertEqual(sorted(policy.viewers), []) - - self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) - - def test_set_iam_policy_w_bound_client(self): - import operator - from google.cloud.pubsub.iam import Policy - from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, - 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, - 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, - 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, - 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, - 'members': [SUBSCRIBER]}, - ], - } - RESPONSE = POLICY.copy() - 
RESPONSE['etag'] = 'ABACABAF' - RESPONSE['version'] = 18 - - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = self._make_one(self.TOPIC_NAME, client=client) - policy = Policy('DEADBEEF', 17) - policy.owners = [OWNER1, OWNER2] - policy.editors = [EDITOR1, EDITOR2] - policy.viewers = [VIEWER1, VIEWER2] - policy.publishers = [PUBLISHER] - policy.subscribers = [SUBSCRIBER] - - new_policy = topic.set_iam_policy(policy) - - self.assertEqual(new_policy.etag, 'ABACABAF') - self.assertEqual(new_policy.version, 18) - self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(len(api._set_iam_policy), 2) - self.assertEqual(api._set_iam_policy[0], self.TOPIC_PATH) - resource = api._set_iam_policy[1] - self.assertEqual(resource['etag'], POLICY['etag']) - self.assertEqual(resource['version'], POLICY['version']) - key = operator.itemgetter('role') - self.assertEqual( - sorted(resource['bindings'], key=key), - sorted(POLICY['bindings'], key=key)) - - def test_set_iam_policy_w_alternate_client(self): - from google.cloud.pubsub.iam import Policy - - RESPONSE = {'etag': 'ACAB'} - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = self._make_one(self.TOPIC_NAME, client=client1) - - policy = Policy() - new_policy = topic.set_iam_policy(policy, client=client2) - - self.assertEqual(new_policy.etag, 'ACAB') - self.assertIsNone(new_policy.version) - self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.editors), []) - self.assertEqual(sorted(new_policy.viewers), []) - - self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, {})) - - def test_check_iam_permissions_w_bound_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = ROLES[:-1] - topic = self._make_one(self.TOPIC_NAME, client=client) - - allowed = topic.check_iam_permissions(ROLES) - - self.assertEqual(allowed, ROLES[:-1]) - self.assertEqual(api._tested_iam_permissions, - (self.TOPIC_PATH, ROLES)) - - def test_check_iam_permissions_w_alternate_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = [] - topic = self._make_one(self.TOPIC_NAME, client=client1) - - allowed = topic.check_iam_permissions(ROLES, client=client2) - - self.assertEqual(len(allowed), 0) - self.assertEqual(api._tested_iam_permissions, - (self.TOPIC_PATH, ROLES)) - - -class TestBatch(unittest.TestCase): - PROJECT = 'PROJECT' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.topic import Batch - - return Batch - - def 
_make_one(self, *args, **kwargs): - return self._get_target_class()(*args, **kwargs) - - def test_ctor_defaults(self): - topic = _Topic() - client = _Client(project=self.PROJECT) - batch = self._make_one(topic, client) - self.assertIs(batch.topic, topic) - self.assertIs(batch.client, client) - self.assertEqual(len(batch.messages), 0) - self.assertEqual(len(batch.message_ids), 0) - - def test___iter___empty(self): - topic = _Topic() - client = object() - batch = self._make_one(topic, client) - self.assertEqual(list(batch), []) - - def test___iter___non_empty(self): - topic = _Topic() - client = object() - batch = self._make_one(topic, client) - batch.message_ids[:] = ['ONE', 'TWO', 'THREE'] - self.assertEqual(list(batch), ['ONE', 'TWO', 'THREE']) - - def test_publish_bytes_wo_attrs(self): - PAYLOAD = 'This is the message text' - MESSAGE = {'data': PAYLOAD, - 'attributes': {}} - client = _Client(project=self.PROJECT) - topic = _Topic() - batch = self._make_one(topic, client=client) - batch.publish(PAYLOAD) - self.assertEqual(batch.messages, [MESSAGE]) - - def test_publish_bytes_w_add_timestamp(self): - PAYLOAD = 'This is the message text' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'timestamp': 'TIMESTAMP'}} - client = _Client(project=self.PROJECT) - topic = _Topic(timestamp_messages=True) - batch = self._make_one(topic, client=client) - batch.publish(PAYLOAD) - self.assertEqual(batch.messages, [MESSAGE]) - - def test_commit_w_bound_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, - 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client) - - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - batch.commit() - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_commit_w_alternate_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client1 = _Client(project='PROJECT') - client2 = _Client(project='PROJECT') - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client1) - - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - batch.commit(client=client2) - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_context_mgr_success(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] 
- topic = _Topic() - batch = self._make_one(topic, client=client) - - with batch as other: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertIs(other, batch) - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_context_mgr_failure(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - topic = _Topic() - batch = self._make_one(topic, client=client) - - try: - with batch as other: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - raise _Bugout() - except _Bugout: - pass - - self.assertIs(other, batch) - self.assertEqual(list(batch), []) - self.assertEqual(list(batch.messages), [MESSAGE1, MESSAGE2]) - self.assertEqual(getattr(api, '_topic_published', self), self) - - def test_batch_messages(self): - # Establish that a batch actually batches messages in the expected - # way. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client) as batch: - self.assertIsInstance(batch, Batch) - - # Publish four messages and establish that the batch does - # not commit. - for i in range(0, 4): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Check the contents of the batch. - self.assertEqual(batch.messages, [ - {'data': 'Batch message 0.', 'attributes': {}}, - {'data': 'Batch message 1.', 'attributes': {}}, - {'data': 'Batch message 2.', 'attributes': {}}, - {'data': 'Batch message 3.', 'attributes': {}}, - ]) - - def test_message_count_autocommit(self): - # Establish that if the batch is assigned to take a maximum - # number of messages, that it commits when it reaches that maximum. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client, max_messages=5) as batch: - self.assertIsInstance(batch, Batch) - - # Publish four messages and establish that the batch does - # not commit. - for i in range(0, 4): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Publish a fifth message and observe the commit. - batch.publish('The final call to trigger a commit!') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - @mock.patch('time.time') - def test_message_time_autocommit(self, mock_time): - # Establish that if the batch is sufficiently old, that it commits - # the next time it receives a publish. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. 
- Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - mock_time.return_value = 0.0 - with self._make_one(topic, client=client, max_interval=5) as batch: - self.assertIsInstance(batch, Batch) - - # Publish some messages and establish that the batch does - # not commit. - for i in range(0, 10): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Move time ahead so that this batch is too old. - mock_time.return_value = 10.0 - - # Publish another message and observe the commit. - batch.publish('The final call to trigger a commit!') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - def test_message_size_autocommit(self): - # Establish that if the batch is sufficiently large, that it - # auto-commits. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client, max_size=100) as batch: - self.assertIsInstance(batch, Batch) - - # Publish a short (< 100 bytes) message and establish that - # the batch does not commit. - batch.publish(b'foo') - commit.assert_not_called() - - # Publish another message and observe the commit. - batch.publish(u'The final call to trigger a commit, because ' - u'this message is sufficiently long.') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - -class _FauxPublisherAPI(object): - _api_called = 0 - - def topic_create(self, topic_path): - self._topic_created = topic_path - return self._topic_create_response - - def topic_get(self, topic_path): - from google.cloud.exceptions import NotFound - - self._topic_got = topic_path - try: - return self._topic_get_response - except AttributeError: - raise NotFound(topic_path) - - def topic_delete(self, topic_path): - self._topic_deleted = topic_path - return self._topic_delete_response - - def topic_publish(self, topic_path, messages): - self._topic_published = topic_path, messages - self._api_called += 1 - return self._topic_publish_response - - -class _FauxIAMPolicy(object): - - def get_iam_policy(self, target_path): - self._got_iam_policy = target_path - return self._get_iam_policy_response - - def set_iam_policy(self, target_path, policy): - self._set_iam_policy = target_path, policy - return self._set_iam_policy_response - - def test_iam_permissions(self, target_path, permissions): - self._tested_iam_permissions = target_path, permissions - return self._test_iam_permissions_response - - -class _Topic(object): - - def __init__(self, name="NAME", project="PROJECT", - timestamp_messages=False): - self.full_name = 'projects/%s/topics/%s' % (project, name) - self.path = '/%s' % (self.full_name,) - self.timestamp_messages = timestamp_messages - - def _timestamp_message(self, attrs): - if self.timestamp_messages: - attrs['timestamp'] = 'TIMESTAMP' - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - -class _Bugout(Exception): - pass - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response