diff --git a/docs/index.rst b/docs/index.rst index ee47a2ac378f..d9afe5f31af4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -8,7 +8,7 @@ datastore/usage dns/usage language/usage - pubsub/usage + pubsub/index resource-manager/api runtimeconfig/usage spanner/usage diff --git a/docs/pubsub/client.rst b/docs/pubsub/client.rst deleted file mode 100644 index 2745c1d808ee..000000000000 --- a/docs/pubsub/client.rst +++ /dev/null @@ -1,6 +0,0 @@ -Pub/Sub Client -============== - -.. automodule:: google.cloud.pubsub.client - :members: - :show-inheritance: diff --git a/docs/pubsub/iam.rst b/docs/pubsub/iam.rst deleted file mode 100644 index 26943762605b..000000000000 --- a/docs/pubsub/iam.rst +++ /dev/null @@ -1,7 +0,0 @@ -IAM Policy -~~~~~~~~~~ - -.. automodule:: google.cloud.pubsub.iam - :members: - :member-order: bysource - :show-inheritance: diff --git a/docs/pubsub/index.rst b/docs/pubsub/index.rst new file mode 100644 index 000000000000..7b7438b29f9c --- /dev/null +++ b/docs/pubsub/index.rst @@ -0,0 +1,117 @@ +####### +Pub/Sub +####### + +`Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that +allows you to send and receive messages between independent applications. You +can leverage Cloud Pub/Sub’s flexibility to decouple systems and components +hosted on Google Cloud Platform or elsewhere on the Internet. By building on +the same technology Google uses, Cloud Pub/Sub is designed to provide “at +least once” delivery at low latency with on-demand scalability to 1 million +messages per second (and beyond). + +.. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/ + +******************************** +Authentication and Configuration +******************************** + +- For an overview of authentication in ``google-cloud-python``, + see :doc:`/core/auth`. + +- In addition to any authentication configuration, you should also set the + :envvar:`GOOGLE_CLOUD_PROJECT` environment variable for the project you'd + like to interact with. 
If the :envvar:`GOOGLE_CLOUD_PROJECT` environment + variable is not present, the project ID from JSON file credentials is used. + + If you are using Google App Engine or Google Compute Engine + this will be detected automatically. + +- After configuring your environment, create a + :class:`~google.cloud.pubsub_v1.PublisherClient` or + :class:`~google.cloud.pubsub_v1.SubscriberClient`. + +.. code-block:: python + + >>> from google.cloud import pubsub + >>> publisher = pubsub.PublisherClient() + >>> subscriber = pubsub.SubscriberClient() + +or pass in ``credentials`` explicitly. + +.. code-block:: python + + >>> from google.cloud import pubsub + >>> client = pubsub.PublisherClient( + ... credentials=creds, + ... ) + +********** +Publishing +********** + +To publish data to Cloud Pub/Sub you must create a topic, and then publish +messages to it + +.. code-block:: python + + >>> import os + >>> from google.cloud import pubsub + >>> + >>> publisher = pubsub.PublisherClient() + >>> topic = 'projects/{project_id}/topics/{topic}'.format( + ... project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + ... topic='MY_TOPIC_NAME', # Set this to something appropriate. + ... ) + >>> publisher.create_topic(topic) + >>> publisher.publish(topic, b'My first message!', spam='eggs') + +To learn more, consult the :doc:`publishing documentation <publisher/index>`. + + +*********** +Subscribing +*********** + +To subscribe to data in Cloud Pub/Sub, you create a subscription based on +the topic, and subscribe to that. + +.. code-block:: python + + >>> import os + >>> from google.cloud import pubsub + >>> + >>> subscriber = pubsub.SubscriberClient() + >>> topic = 'projects/{project_id}/topics/{topic}'.format( + ... project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + ... topic='MY_TOPIC_NAME', # Set this to something appropriate. + ... ) + >>> subscription_name = 'projects/{project_id}/subscriptions/{sub}'.format( + ... project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + ... 
sub='MY_SUBSCRIPTION_NAME', # Set this to something appropriate. + ... ) + >>> subscription = subscriber.create_subscription(topic, subscription_name) + +The subscription is opened asynchronously, and messages are processed by +use of a callback. + +.. code-block:: python + + >>> def callback(message): + ... print(message.data) + ... message.ack() + >>> subscription.open(callback) + +To learn more, consult the :doc:`subscriber documentation <subscriber/index>`. + + +********** +Learn More +********** + +.. toctree:: + :maxdepth: 3 + + publisher/index + subscriber/index + types diff --git a/docs/pubsub/message.rst b/docs/pubsub/message.rst deleted file mode 100644 index 654c607d46b3..000000000000 --- a/docs/pubsub/message.rst +++ /dev/null @@ -1,6 +0,0 @@ -Message -~~~~~~~ - -.. automodule:: google.cloud.pubsub.message - :members: - :show-inheritance: diff --git a/docs/pubsub/publisher/api/batch.rst b/docs/pubsub/publisher/api/batch.rst new file mode 100644 index 000000000000..5846d3ff9416 --- /dev/null +++ b/docs/pubsub/publisher/api/batch.rst @@ -0,0 +1,8 @@ +:orphan: + +Batch API +========= + +.. automodule:: google.cloud.pubsub_v1.publisher.batch.thread + :members: + :inherited-members: diff --git a/docs/pubsub/publisher/api/client.rst b/docs/pubsub/publisher/api/client.rst new file mode 100644 index 000000000000..47a3aa3d5d7a --- /dev/null +++ b/docs/pubsub/publisher/api/client.rst @@ -0,0 +1,6 @@ +Publisher Client API +==================== + +.. automodule:: google.cloud.pubsub_v1.publisher.client + :members: + :inherited-members: diff --git a/docs/pubsub/publisher/index.rst b/docs/pubsub/publisher/index.rst new file mode 100644 index 000000000000..72b374b588a3 --- /dev/null +++ b/docs/pubsub/publisher/index.rst @@ -0,0 +1,126 @@ +Publishing Messages +=================== + +Publishing messages is handled through the +:class:`~.pubsub_v1.publisher.client.Client` class (aliased as +``google.cloud.pubsub.PublisherClient``). 
This class provides methods to +create topics, and (most importantly) a +:meth:`~.pubsub_v1.publisher.client.Client.publish` method that publishes +messages to Pub/Sub. + +Instantiating a publishing client is straightforward: + +.. code-block:: python + + from google.cloud import pubsub + publish_client = pubsub.PublisherClient() + + +Publish a Message +----------------- + +To publish a message, use the +:meth:`~.pubsub_v1.publisher.client.Client.publish` method. This method accepts +two positional arguments: the topic to publish to, and the body of the message. +It also accepts arbitrary keyword arguments, which are passed along as +attributes of the message. + +The topic is passed along as a string; all topics have the canonical form of +``projects/{project_name}/topics/{topic_name}``. + +Therefore, a very basic publishing call looks like: + +.. code-block:: python + + topic = 'projects/{project}/topics/{topic}' + publish_client.publish(topic, b'This is my message.') + +.. note:: + + The message data in Pub/Sub is an opaque blob of bytes, and as such, you + *must* send a ``bytes`` object in Python 3 (``str`` object in Python 2). + If you send a text string (``str`` in Python 3, ``unicode`` in Python 2), + the method will raise :exc:`TypeError`. + + The reason it works this way is because there is no reasonable guarantee + that the same language or environment is being used by the subscriber, + and so it is the responsibility of the publisher to properly encode + the payload. + +If you want to include attributes, simply add keyword arguments: + +.. code-block:: python + + topic = 'projects/{project}/topics/{topic}' + publish_client.publish(topic, b'This is my message.', foo='bar') + + +Batching +-------- + +Whenever you publish a message, a +:class:`~.pubsub_v1.publisher.batch.thread.Batch` is automatically created. +This way, if you publish a large volume of messages, it reduces the number of +requests made to the server. 
+ +The way that this works is that on the first message that you send, a new +:class:`~.pubsub_v1.publisher.batch.thread.Batch` is created automatically. +For every subsequent message, if there is already a valid batch that is still +accepting messages, then that batch is used. When the batch is created, it +begins a countdown that publishes the batch once sufficient time has +elapsed (by default, this is 0.05 seconds). + +If you need different batching settings, simply provide a +:class:`~.pubsub_v1.types.BatchSettings` object when you instantiate the +:class:`~.pubsub_v1.publisher.client.Client`: + +.. code-block:: python + + from google.cloud import pubsub + from google.cloud.pubsub import types + + client = pubsub.PublisherClient( + batch_settings=types.BatchSettings(max_messages=500), + ) + +Pub/Sub accepts a maximum of 1,000 messages in a batch, and the size of a +batch cannot exceed 10 megabytes. + + +Futures +------- + +Every call to :meth:`~.pubsub_v1.publisher.client.Client.publish` will return +a class that conforms to the :class:`~concurrent.futures.Future` interface. +You can use this to ensure that the publish succeeded: + +.. code-block:: python + + # The .result() method will block until the future is complete. + # If there is an error, it will raise an exception. + future = client.publish(topic, b'My awesome message.') + message_id = future.result() + +You can also attach a callback to the future: + +.. code-block:: python + + # Callbacks receive the future as their only argument, as defined in + # the Future interface. + def callback(future): + message_id = future.result() + do_something_with(message_id) + + # The callback is added once you get the future. If you add a callback + # and the future is already done, it will simply be executed immediately. + future = client.publish(topic, b'My awesome message.') + future.add_done_callback(callback) + + +API Reference +------------- + +.. 
toctree:: + :maxdepth: 2 + + api/client diff --git a/docs/pubsub/snippets.py b/docs/pubsub/snippets.py deleted file mode 100644 index 96eea175c0cd..000000000000 --- a/docs/pubsub/snippets.py +++ /dev/null @@ -1,483 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Testable usage examples for Google Cloud Pubsub API wrapper - -Each example function takes a ``client`` argument (which must be an instance -of :class:`google.cloud.pubsub.client.Client`) and uses it to perform a task -with the API. - -To facilitate running the examples as system tests, each example is also passed -a ``to_delete`` list; the function adds to the list any objects created which -need to be deleted during teardown. 
-""" - -import time - -from google.cloud.pubsub.client import Client - - -def snippet(func): - """Mark ``func`` as a snippet example function.""" - func._snippet = True - return func - - -def _millis(): - return time.time() * 1000 - - -@snippet -def client_list_topics(client, to_delete): # pylint: disable=unused-argument - """List topics for a project.""" - - def do_something_with(sub): # pylint: disable=unused-argument - pass - - # [START client_list_topics] - for topic in client.list_topics(): # API request(s) - do_something_with(topic) - # [END client_list_topics] - - -@snippet -def client_list_subscriptions(client, - to_delete): # pylint: disable=unused-argument - """List all subscriptions for a project.""" - - def do_something_with(sub): # pylint: disable=unused-argument - pass - - # [START client_list_subscriptions] - for subscription in client.list_subscriptions(): # API request(s) - do_something_with(subscription) - # [END client_list_subscriptions] - - -@snippet -def client_topic(client, to_delete): # pylint: disable=unused-argument - """Topic factory.""" - TOPIC_NAME = 'topic_factory-%d' % (_millis(),) - - # [START client_topic] - topic = client.topic(TOPIC_NAME) - # [END client_topic] - - -@snippet -def client_subscription(client, to_delete): # pylint: disable=unused-argument - """Subscription factory.""" - SUBSCRIPTION_NAME = 'subscription_factory-%d' % (_millis(),) - - # [START client_subscription] - subscription = client.subscription( - SUBSCRIPTION_NAME, ack_deadline=60, - retain_acked_messages=True) - # [END client_subscription] - - -@snippet -def topic_create(client, to_delete): - """Create a topic.""" - TOPIC_NAME = 'topic_create-%d' % (_millis(),) - - # [START topic_create] - topic = client.topic(TOPIC_NAME) - topic.create() # API request - # [END topic_create] - - to_delete.append(topic) - - -@snippet -def topic_exists(client, to_delete): - """Test existence of a topic.""" - TOPIC_NAME = 'topic_exists-%d' % (_millis(),) - topic = 
client.topic(TOPIC_NAME) - to_delete.append(topic) - - # [START topic_exists] - assert not topic.exists() # API request - topic.create() # API request - assert topic.exists() # API request - # [END topic_exists] - - -@snippet -def topic_delete(client, to_delete): # pylint: disable=unused-argument - """Delete a topic.""" - TOPIC_NAME = 'topic_delete-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() # API request - - # [START topic_delete] - assert topic.exists() # API request - topic.delete() - assert not topic.exists() # API request - # [END topic_delete] - - -@snippet -def topic_iam_policy(client, to_delete): - """Fetch / set a topic's IAM policy.""" - TOPIC_NAME = 'topic_iam_policy-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_get_iam_policy] - policy = topic.get_iam_policy() # API request - # [END topic_get_iam_policy] - - assert len(policy.viewers) == 0 - assert len(policy.editors) == 0 - assert len(policy.owners) == 0 - - # [START topic_set_iam_policy] - ALL_USERS = policy.all_users() - policy.viewers = [ALL_USERS] - LOGS_GROUP = policy.group('cloud-logs@google.com') - policy.editors = [LOGS_GROUP] - new_policy = topic.set_iam_policy(policy) # API request - # [END topic_set_iam_policy] - - assert ALL_USERS in new_policy.viewers - assert LOGS_GROUP in new_policy.editors - - -# @snippet # Disabled due to #1687 -def topic_check_iam_permissions(client, to_delete): - """Check topic IAM permissions.""" - TOPIC_NAME = 'topic_check_iam_permissions-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_check_iam_permissions] - from google.cloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE - TO_CHECK = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = topic.check_iam_permissions(TO_CHECK) - assert set(ALLOWED) == set(TO_CHECK) - # [END topic_check_iam_permissions] - - -@snippet -def 
topic_publish_messages(client, to_delete): - """Publish messages to a topic.""" - TOPIC_NAME = 'topic_publish_messages-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_publish_simple_message] - topic.publish(b'This is the message payload') # API request - # [END topic_publish_simple_message] - - # [START topic_publish_message_with_attrs] - topic.publish(b'Another message payload', extra='EXTRA') # API request - # [END topic_publish_message_with_attrs] - - -@snippet -def topic_subscription(client, to_delete): - """Create subscriptions to a topic.""" - TOPIC_NAME = 'topic_subscription-%d' % (_millis(),) - SUB_DEFAULTS = 'topic_subscription-defaults-%d' % (_millis(),) - SUB_ACK90 = 'topic_subscription-ack90-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_subscription_defaults] - sub_defaults = topic.subscription(SUB_DEFAULTS) - # [END topic_subscription_defaults] - - sub_defaults.create() # API request - to_delete.append(sub_defaults) - expected_names = set() - expected_names.add(sub_defaults.full_name) - - # [START topic_subscription_ack90] - sub_ack90 = topic.subscription(SUB_ACK90, ack_deadline=90) - # [END topic_subscription_ack90] - - sub_ack90.create() # API request - to_delete.append(sub_ack90) - expected_names.add(sub_ack90.full_name) - - sub_names = set() - - def do_something_with(sub): - sub_names.add(sub.full_name) - - # [START topic_list_subscriptions] - for subscription in topic.list_subscriptions(): # API request(s) - do_something_with(subscription) - # [END topic_list_subscriptions] - - assert sub_names.issuperset(expected_names) - - -# @snippet: disabled, because push-mode requires a validated endpoint URL -def topic_subscription_push(client, to_delete): - """Create subscriptions to a topic.""" - TOPIC_NAME = 'topic_subscription_push-%d' % (_millis(),) - SUB_PUSH = 'topic_subscription_push-sub-%d' % (_millis(),) - 
PUSH_URL = 'https://api.example.com/push-endpoint' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START topic_subscription_push] - subscription = topic.subscription(SUB_PUSH, push_endpoint=PUSH_URL) - subscription.create() # API request - # [END topic_subscription_push] - - # [START subscription_push_pull] - subscription.modify_push_configuration(push_endpoint=None) # API request - # [END subscription_push_pull] - - # [START subscription_pull_push] - subscription.modify_push_configuration( - push_endpoint=PUSH_URL) # API request - # [END subscription_pull_push] - - -@snippet -def subscription_lifecycle(client, to_delete): - """Test lifecycle of a subscription.""" - TOPIC_NAME = 'subscription_lifecycle-%d' % (_millis(),) - SUB_NAME = 'subscription_lifecycle-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - # [START subscription_create] - subscription = topic.subscription(SUB_NAME) - subscription.create() # API request - # [END subscription_create] - - # [START subscription_exists] - assert subscription.exists() # API request - # [END subscription_exists] - - # [START subscription_reload] - subscription.reload() # API request - # [END subscription_reload] - - # [START subscription_delete] - subscription.delete() # API request - # [END subscription_delete] - - -@snippet -def subscription_pull(client, to_delete): - """Pull messges from a subscribed topic.""" - TOPIC_NAME = 'subscription_pull-%d' % (_millis(),) - SUB_NAME = 'subscription_pull-defaults-%d' % (_millis(),) - PAYLOAD1 = b'PAYLOAD1' - PAYLOAD2 = b'PAYLOAD2' - EXTRA = 'EXTRA' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START subscription_pull_return_immediately] - pulled = subscription.pull(return_immediately=True) - # [END subscription_pull_return_immediately] - assert 
len(pulled) == 0, "unexpected message" - - topic.publish(PAYLOAD1) - topic.publish(PAYLOAD2, extra=EXTRA) - - time.sleep(1) # eventually-consistent - - # [START subscription_pull] - pulled = subscription.pull(max_messages=2) - # [END subscription_pull] - - assert len(pulled) == 2, "eventual consistency" - - # [START subscription_modify_ack_deadline] - for ack_id, _ in pulled: - subscription.modify_ack_deadline(ack_id, 90) # API request - # [END subscription_modify_ack_deadline] - - payloads = [] - extras = [] - - def do_something_with(message): # pylint: disable=unused-argument - payloads.append(message.data) - if message.attributes: - extras.append(message.attributes) - - class ApplicationException(Exception): - pass - - def log_exception(_): - pass - - # [START subscription_acknowledge] - for ack_id, message in pulled: - try: - do_something_with(message) - except ApplicationException as e: - log_exception(e) - else: - subscription.acknowledge([ack_id]) - # [END subscription_acknowledge] - - assert set(payloads) == set([PAYLOAD1, PAYLOAD2]), 'payloads: %s' % ( - (payloads,)) - assert extras == [{'extra': EXTRA}], 'extras: %s' % ( - (extras,)) - - -@snippet -def subscription_pull_w_autoack(client, to_delete): - """Pull messges from a topic, auto-acknowldging them""" - TOPIC_NAME = 'subscription_pull_autoack-%d' % (_millis(),) - SUB_NAME = 'subscription_pull_autoack-defaults-%d' % (_millis(),) - PAYLOAD1 = b'PAYLOAD1' - PAYLOAD2 = b'PAYLOAD2' - EXTRA = 'EXTRA' - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START topic_batch] - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, extra=EXTRA) - # [END topic_batch] - - time.sleep(1) # eventually-consistent - - payloads = [] - extras = [] - - def do_something_with(message): # pylint: disable=unused-argument - payloads.append(message.data) - if 
message.attributes: - extras.append(message.attributes) - - # [START subscription_pull_autoack] - from google.cloud.pubsub.subscription import AutoAck - with AutoAck(subscription, max_messages=10) as ack: - for ack_id, message in list(ack.items()): - try: - do_something_with(message) - except Exception: # pylint: disable=broad-except - del ack[ack_id] - # [END subscription_pull_autoack] - - assert set(payloads) == set(PAYLOAD1, PAYLOAD1), "eventual consistency" - assert extras == [{'extra': EXTRA}], "eventual consistency" - - -@snippet -def subscription_iam_policy(client, to_delete): - """Fetch / set a subscription's IAM policy.""" - TOPIC_NAME = 'subscription_iam_policy-%d' % (_millis(),) - SUB_NAME = 'subscription_iam_policy-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - subscription.create() - to_delete.append(subscription) - - # [START subscription_get_iam_policy] - policy = subscription.get_iam_policy() # API request - # [END subscription_get_iam_policy] - - assert len(policy.viewers) == 0 - assert len(policy.editors) == 0 - assert len(policy.owners) == 0 - - # [START subscription_set_iam_policy] - ALL_USERS = policy.all_users() - policy.viewers = [ALL_USERS] - LOGS_GROUP = policy.group('cloud-logs@google.com') - policy.editors = [LOGS_GROUP] - new_policy = subscription.set_iam_policy(policy) # API request - # [END subscription_set_iam_policy] - - assert ALL_USERS in new_policy.viewers - assert LOGS_GROUP in new_policy.editors - - -# @snippet # Disabled due to #1687 -def subscription_check_iam_permissions(client, to_delete): - """Check subscription IAM permissions.""" - TOPIC_NAME = 'subscription_check_iam_permissions-%d' % (_millis(),) - SUB_NAME = 'subscription_check_iam_permissions-defaults-%d' % (_millis(),) - topic = client.topic(TOPIC_NAME) - topic.create() - to_delete.append(topic) - - subscription = topic.subscription(SUB_NAME) - 
subscription.create() - to_delete.append(subscription) - - # [START subscription_check_iam_permissions] - from google.cloud.pubsub.iam import OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE - TO_CHECK = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = subscription.check_iam_permissions(TO_CHECK) - assert set(ALLOWED) == set(TO_CHECK) - # [END subscription_check_iam_permissions] - - -def _line_no(func): - code = getattr(func, '__code__', None) or getattr(func, 'func_code') - return code.co_firstlineno - - -def _find_examples(): - funcs = [obj for obj in globals().values() - if getattr(obj, '_snippet', False)] - for func in sorted(funcs, key=_line_no): - yield func - - -def _name_and_doc(func): - return func.__name__, func.__doc__ - - -def main(): - client = Client() - for example in _find_examples(): - to_delete = [] - print('%-25s: %s' % _name_and_doc(example)) - try: - example(client, to_delete) - except AssertionError as e: - print(' FAIL: %s' % (e,)) - except Exception as e: # pylint: disable=broad-except - print(' ERROR: %r' % (e,)) - for item in to_delete: - item.delete() - - -if __name__ == '__main__': - main() diff --git a/docs/pubsub/subscriber/api/client.rst b/docs/pubsub/subscriber/api/client.rst new file mode 100644 index 000000000000..965880c5a640 --- /dev/null +++ b/docs/pubsub/subscriber/api/client.rst @@ -0,0 +1,6 @@ +Subscriber Client API +===================== + +.. automodule:: google.cloud.pubsub_v1.subscriber.client + :members: + :inherited-members: diff --git a/docs/pubsub/subscriber/api/message.rst b/docs/pubsub/subscriber/api/message.rst new file mode 100644 index 000000000000..d6566f4c363e --- /dev/null +++ b/docs/pubsub/subscriber/api/message.rst @@ -0,0 +1,5 @@ +Messages +======== + +.. 
autoclass:: google.cloud.pubsub_v1.subscriber.message.Message + :members: ack, attributes, data, nack, publish_time diff --git a/docs/pubsub/subscriber/api/policy.rst b/docs/pubsub/subscriber/api/policy.rst new file mode 100644 index 000000000000..95d288d0b974 --- /dev/null +++ b/docs/pubsub/subscriber/api/policy.rst @@ -0,0 +1,5 @@ +Subscriptions +============= + +.. autoclass:: google.cloud.pubsub_v1.subscriber.policy.thread.Policy + :members: open, close diff --git a/docs/pubsub/subscriber/index.rst b/docs/pubsub/subscriber/index.rst new file mode 100644 index 000000000000..be32a9e9ed97 --- /dev/null +++ b/docs/pubsub/subscriber/index.rst @@ -0,0 +1,123 @@ +Subscribing to Messages +======================= + +Subscribing to messages is handled through the +:class:`~.pubsub_v1.subscriber.client.Client` class (aliased as +``google.cloud.pubsub.SubscriberClient``). This class provides a +:meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method to +attach to subscriptions on existing topics, and (most importantly) a +:meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open` method that +consumes messages from Pub/Sub. + +Instantiating a subscriber client is straightforward: + +.. code-block:: python + + from google.cloud import pubsub + subscriber = pubsub.SubscriberClient() + + +Creating a Subscription +----------------------- + +In Pub/Sub, a **subscription** is a discrete pull of messages from a topic. +If multiple clients pull the same subscription, then messages are split +between them. If multiple clients create a subscription each, then each client +will get every message. + +.. note:: + + Remember that Pub/Sub operates under the principle of "everything at least + once". Even in the case where multiple clients pull the same subscription, + *some* redundancy is likely. + +Creating a subscription requires that you already know what topic you want +to subscribe to, and it must already exist. Once you have that, it is easy: + +.. 
code-block:: python + + # Substitute {project}, {topic}, and {subscription} with appropriate + # values for your application. + topic_name = 'projects/{project}/topics/{topic}' + sub_name = 'projects/{project}/subscriptions/{subscription}' + subscriber.create_subscription(topic_name, sub_name) + + +Pulling a Subscription +---------------------- + +Once you have created a subscription (or if you already had one), the next +step is to pull data from it. This entails two steps: first you must call +:meth:`~.pubsub_v1.subscriber.client.Client.subscribe`, passing in the +subscription string. + +.. code-block:: python + + # As before, substitute {project} and {subscription} with appropriate + # values for your application. + subscription = subscriber.subscribe( + 'projects/{project}/subscriptions/{subscription}', + ) + +This will return an object with an +:meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open` method; calling +this method will actually begin consumption of the subscription. + + +Subscription Callbacks +---------------------- + +Because subscriptions in this Pub/Sub client are opened asynchronously, +processing the messages that are yielded by the subscription is handled +through **callbacks**. + +The basic idea: Define a function that takes one argument; this argument +will be a :class:`~.pubsub_v1.subscriber.message.Message` instance. This +function should do whatever processing is necessary. At the end, the +function should :meth:`~.pubsub_v1.subscriber.message.Message.ack` the +message. + +When you call :meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open`, you +must pass the callback that will be used. + +Here is an example: + +.. code-block:: python + + # Define the callback. + # Note that the callback is defined *before* the subscription is opened. + def callback(message): + do_something_with(message) # Replace this with your actual logic. + message.ack() + + # Open the subscription, passing the callback. 
+ subscription.open(callback) + +Explaining Ack +-------------- + +In Pub/Sub, the term **ack** stands for "acknowledge". You should ack a +message when your processing of that message *has completed*. When you ack +a message, you are telling Pub/Sub that you do not need to see it again. + +It might be tempting to ack messages immediately on receipt. While there +are valid use cases for this, in general it is unwise. The reason why: If +there is some error or edge case in your processing logic, and processing +of the message fails, you will have already told Pub/Sub that you successfully +processed the message. By contrast, if you ack only upon completion, then +Pub/Sub will eventually re-deliver the unacknowledged message. + +It is also possible to **nack** a message, which is the opposite. When you +nack, it tells Pub/Sub that you are unable or unwilling to deal with the +message, and that the service should redeliver it. + + +API Reference +------------- + +.. toctree:: + :maxdepth: 2 + + api/client + api/policy + api/message diff --git a/docs/pubsub/subscription.rst b/docs/pubsub/subscription.rst deleted file mode 100644 index f242cb644e83..000000000000 --- a/docs/pubsub/subscription.rst +++ /dev/null @@ -1,7 +0,0 @@ -Subscriptions -~~~~~~~~~~~~~ - -.. automodule:: google.cloud.pubsub.subscription - :members: - :member-order: bysource - :show-inheritance: diff --git a/docs/pubsub/topic.rst b/docs/pubsub/topic.rst deleted file mode 100644 index 323d467a08ce..000000000000 --- a/docs/pubsub/topic.rst +++ /dev/null @@ -1,7 +0,0 @@ -Topics -~~~~~~ - -.. automodule:: google.cloud.pubsub.topic - :members: - :member-order: bysource - :show-inheritance: diff --git a/docs/pubsub/types.rst b/docs/pubsub/types.rst new file mode 100644 index 000000000000..87c987571766 --- /dev/null +++ b/docs/pubsub/types.rst @@ -0,0 +1,5 @@ +Pub/Sub Client Types +==================== + +.. 
automodule:: google.cloud.pubsub_v1.types + :members: diff --git a/docs/pubsub/usage.rst b/docs/pubsub/usage.rst deleted file mode 100644 index 96727e654835..000000000000 --- a/docs/pubsub/usage.rst +++ /dev/null @@ -1,245 +0,0 @@ -Pub / Sub -========= - - -.. toctree:: - :maxdepth: 2 - :hidden: - - client - topic - subscription - message - iam - -Authentication / Configuration ------------------------------- - -- Use :class:`Client ` objects to configure - your applications. - -- In addition to any authentication configuration, you should also set the - :envvar:`GOOGLE_CLOUD_PROJECT` environment variable for the project you'd like - to interact with. If you are Google App Engine or Google Compute Engine - this will be detected automatically. - -- The library now enables the ``gRPC`` transport for the pubsub API by - default, assuming that the required dependencies are installed and - importable. To *disable* this transport, set the - :envvar:`GOOGLE_CLOUD_DISABLE_GRPC` environment variable to a - non-empty string, e.g.: ``$ export GOOGLE_CLOUD_DISABLE_GRPC=true``. - -- :class:`Client ` objects hold both a ``project`` - and an authenticated connection to the PubSub service. - -- The authentication credentials can be implicitly determined from the - environment or directly via - :meth:`from_service_account_json ` - and - :meth:`from_service_account_p12 `. - -- After setting ``GOOGLE_APPLICATION_CREDENTIALS`` and ``GOOGLE_CLOUD_PROJECT`` - environment variables, create a :class:`Client ` - - .. code-block:: python - - >>> from google.cloud import pubsub - >>> client = pubsub.Client() - - -Manage topics for a project ---------------------------- - -List topics for the default project: - -.. literalinclude:: snippets.py - :start-after: [START client_list_topics] - :end-before: [END client_list_topics] - -Create a new topic for the default project: - -.. 
literalinclude:: snippets.py - :start-after: [START topic_create] - :end-before: [END topic_create] - -Check for the existence of a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_exists] - :end-before: [END topic_exists] - -Delete a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_delete] - :end-before: [END topic_delete] - -Fetch the IAM policy for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_get_iam_policy] - :end-before: [END topic_get_iam_policy] - -Update the IAM policy for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_set_iam_policy] - :end-before: [END topic_set_iam_policy] - -Test permissions allowed by the current IAM policy on a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_check_iam_permissions] - :end-before: [END topic_check_iam_permissions] - - -Publish messages to a topic ---------------------------- - -Publish a single message to a topic, without attributes: - -.. literalinclude:: snippets.py - :start-after: [START topic_publish_simple_message] - :end-before: [END topic_publish_simple_message] - -Publish a single message to a topic, with attributes: - -.. literalinclude:: snippets.py - :start-after: [START topic_publish_message_with_attrs] - :end-before: [END topic_publish_message_with_attrs] - -Publish a set of messages to a topic (as a single request): - -.. literalinclude:: snippets.py - :start-after: [START topic_batch] - :end-before: [END topic_batch] - -.. note:: - - The only API request happens during the ``__exit__()`` of the topic - used as a context manager, and only if the block exits without raising - an exception. - - -Manage subscriptions to topics ------------------------------- - -List all subscriptions for the default project: - -.. literalinclude:: snippets.py - :start-after: [START client_list_subscriptions] - :end-before: [END client_list_subscriptions] - -List subscriptions for a topic: - -.. 
literalinclude:: snippets.py - :start-after: [START topic_list_subscriptions] - :end-before: [END topic_list_subscriptions] - -Create a new pull subscription for a topic, with defaults: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_defaults] - :end-before: [END topic_subscription_defaults] - -Create a new pull subscription for a topic with a non-default ACK deadline: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_ack90] - :end-before: [END topic_subscription_ack90] - -Create a new push subscription for a topic: - -.. literalinclude:: snippets.py - :start-after: [START topic_subscription_push] - :end-before: [END topic_subscription_push] - -Check for the existence of a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_exists] - :end-before: [END subscription_exists] - -Convert a pull subscription to push: - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull_push] - :end-before: [END subscription_pull_push] - -Convert a push subscription to pull: - -.. literalinclude:: snippets.py - :start-after: [START subscription_push_pull] - :end-before: [END subscription_push_pull] - -Re-synchronize a subscription with the back-end: - -.. literalinclude:: snippets.py - :start-after: [START subscription_reload] - :end-before: [END subscription_reload] - -Fetch the IAM policy for a subscription - -.. literalinclude:: snippets.py - :start-after: [START subscription_get_iam_policy] - :end-before: [END subscription_get_iam_policy] - -Update the IAM policy for a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_set_iam_policy] - :end-before: [END subscription_set_iam_policy] - -Test permissions allowed by the current IAM policy on a subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_check_iam_permissions] - :end-before: [END subscription_check_iam_permissions] - -Delete a subscription: - -.. 
literalinclude:: snippets.py - :start-after: [START subscription_delete] - :end-before: [END subscription_delete] - - -Pull messages from a subscription ---------------------------------- - -Fetch pending messages for a pull subscription: - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull] - :end-before: [END subscription_pull] - -Note that received messages must be acknowledged, or else the back-end -will re-send them later: - -.. literalinclude:: snippets.py - :start-after: [START subscription_acknowledge] - :end-before: [END subscription_acknowledge] - -Fetch messages for a pull subscription without blocking (none pending): - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull_return_immediately] - :end-before: [END subscription_pull_return_immediately] - -Update the acknowlegement deadline for pulled messages: - -.. literalinclude:: snippets.py - :start-after: [START subscription_modify_ack_deadline] - :end-before: [END subscription_modify_ack_deadline] - -Fetch pending messages, acknowledging those whose processing doesn't raise an -error: - -.. literalinclude:: snippets.py - :start-after: [START subscription_pull_autoack] - :end-before: [END subscription_pull_autoack] - -.. 
note:: - - The ``pull`` API request occurs at entry to the ``with`` block, and the - ``acknowlege`` API request occurs at the end, passing only the ``ack_ids`` - which haven't been deleted from ``ack`` diff --git a/pubsub/.coveragerc b/pubsub/.coveragerc index a54b99aa14b7..41ca7428e2ee 100644 --- a/pubsub/.coveragerc +++ b/pubsub/.coveragerc @@ -1,11 +1,17 @@ [run] branch = True +source = + google.cloud.pubsub + google.cloud.pubsub_v1 + tests.unit [report] -fail_under = 100 show_missing = True + exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py index ab8233824595..5313e0d941a1 100644 --- a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py @@ -861,16 +861,14 @@ def create_snapshot(self, name, subscription, options=None): Format is ``projects/{project}/snapshots/{snap}``. subscription (string): The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to retain: - (a) The existing backlog on the subscription. More precisely, this is - :: + - The existing backlog on the subscription. More precisely, this is defined as the messages in the subscription's backlog that are unacknowledged upon the successful completion of the `CreateSnapshot` request; as well as: - (b) Any messages published to the subscription's topic following the - :: - + - Any messages published to the subscription's topic following the successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. options (:class:`google.gax.CallOptions`): Overrides the default settings for this call, e.g, timeout, retries etc. 
diff --git a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json index 4b31158fbac8..6180cc0a941f 100644 --- a/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json +++ b/pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json @@ -35,6 +35,15 @@ "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 12000, "total_timeout_millis": 600000 + }, + "streaming": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 900000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 900000, + "total_timeout_millis": 900000 } }, "methods": { @@ -79,9 +88,9 @@ "retry_params_name": "messaging" }, "StreamingPull": { - "timeout_millis": 60000, + "timeout_millis": 900000, "retry_codes_name": "pull", - "retry_params_name": "messaging" + "retry_params_name": "streaming" }, "ModifyPushConfig": { "timeout_millis": 60000, diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py index 07919f8c5646..aeee99e182d0 100644 --- a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py @@ -24,7 +24,7 @@ name='google/cloud/proto/pubsub/v1/pubsub.proto', package='google.pubsub.v1', syntax='proto3', - serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\x15\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xda\x01\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 
\x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"X\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 
\x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xe8\x0f\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\
xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9b\x06\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') + serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"y\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 
\x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 
\x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 
\x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xf7\x10\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\
x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9a\x07\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\
xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -32,6 +32,43 @@ +_TOPIC_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Topic.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Topic.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Topic.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + _TOPIC = _descriptor.Descriptor( name='Topic', full_name='google.pubsub.v1.Topic', @@ -46,10 +83,17 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Topic.labels', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], - nested_types=[], + nested_types=[_TOPIC_LABELSENTRY, ], enum_types=[ ], options=None, @@ -59,7 +103,7 @@ oneofs=[ ], serialized_start=221, - serialized_end=242, + serialized_end=342, ) @@ -96,8 +140,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=415, - serialized_end=464, + serialized_start=515, + serialized_end=564, ) _PUBSUBMESSAGE = _descriptor.Descriptor( @@ -147,8 +191,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=245, - serialized_end=464, + serialized_start=345, + serialized_end=564, ) @@ -178,8 +222,46 @@ extension_ranges=[], oneofs=[ ], - serialized_start=466, - serialized_end=498, + serialized_start=566, + serialized_end=598, +) + + +_UPDATETOPICREQUEST = _descriptor.Descriptor( + 
name='UpdateTopicRequest', + full_name='google.pubsub.v1.UpdateTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.UpdateTopicRequest.topic', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateTopicRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=600, + serialized_end=709, ) @@ -216,8 +298,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=500, - serialized_end=582, + serialized_start=711, + serialized_end=793, ) @@ -247,8 +329,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=584, - serialized_end=622, + serialized_start=795, + serialized_end=833, ) @@ -292,8 +374,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=624, - serialized_end=699, + serialized_start=835, + serialized_end=910, ) @@ -330,8 +412,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=701, - serialized_end=787, + serialized_start=912, + serialized_end=998, ) @@ -375,8 +457,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=789, - serialized_end=874, + serialized_start=1000, + serialized_end=1085, ) @@ -413,8 +495,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=876, - serialized_end=956, + serialized_start=1087, + serialized_end=1167, ) @@ -444,11 +526,48 @@ extension_ranges=[], oneofs=[ ], - serialized_start=958, - serialized_end=993, 
+ serialized_start=1169, + serialized_end=1204, ) +_SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Subscription.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Subscription.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Subscription.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + _SUBSCRIPTION = _descriptor.Descriptor( name='Subscription', full_name='google.pubsub.v1.Subscription', @@ -498,10 +617,17 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Subscription.labels', index=6, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], - nested_types=[], + nested_types=[_SUBSCRIPTION_LABELSENTRY, ], enum_types=[ ], options=None, @@ -510,8 +636,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=996, - serialized_end=1214, + 
serialized_start=1207, + serialized_end=1532, ) @@ -548,8 +674,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=415, - serialized_end=464, + serialized_start=515, + serialized_end=564, ) _PUSHCONFIG = _descriptor.Descriptor( @@ -585,8 +711,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1217, - serialized_end=1369, + serialized_start=1535, + serialized_end=1687, ) @@ -623,8 +749,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1371, - serialized_end=1454, + serialized_start=1689, + serialized_end=1772, ) @@ -654,8 +780,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1456, - serialized_end=1502, + serialized_start=1774, + serialized_end=1820, ) @@ -692,8 +818,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1505, - serialized_end=1635, + serialized_start=1823, + serialized_end=1953, ) @@ -737,8 +863,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1637, - serialized_end=1719, + serialized_start=1955, + serialized_end=2037, ) @@ -775,8 +901,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1721, - serialized_end=1828, + serialized_start=2039, + serialized_end=2146, ) @@ -806,8 +932,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1830, - serialized_end=1879, + serialized_start=2148, + serialized_end=2197, ) @@ -844,8 +970,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1881, - serialized_end=1979, + serialized_start=2199, + serialized_end=2297, ) @@ -889,8 +1015,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1981, - serialized_end=2066, + serialized_start=2299, + serialized_end=2384, ) @@ -920,8 +1046,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2068, - serialized_end=2144, + serialized_start=2386, + serialized_end=2462, ) @@ -965,8 +1091,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2146, - serialized_end=2241, + serialized_start=2464, + serialized_end=2559, ) @@ -1003,8 +1129,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2243, - serialized_end=2302, 
+ serialized_start=2561, + serialized_end=2620, ) @@ -1062,8 +1188,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2305, - serialized_end=2469, + serialized_start=2623, + serialized_end=2787, ) @@ -1093,8 +1219,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2471, - serialized_end=2556, + serialized_start=2789, + serialized_end=2874, ) @@ -1131,11 +1257,86 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2558, - serialized_end=2617, + serialized_start=2876, + serialized_end=2935, +) + + +_UPDATESNAPSHOTREQUEST = _descriptor.Descriptor( + name='UpdateSnapshotRequest', + full_name='google.pubsub.v1.UpdateSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.UpdateSnapshotRequest.snapshot', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateSnapshotRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2937, + serialized_end=3055, ) +_SNAPSHOT_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Snapshot.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Snapshot.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Snapshot.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + _SNAPSHOT = _descriptor.Descriptor( name='Snapshot', full_name='google.pubsub.v1.Snapshot', @@ -1164,10 +1365,17 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Snapshot.labels', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], - nested_types=[], + nested_types=[_SNAPSHOT_LABELSENTRY, ], enum_types=[ ], options=None, @@ -1176,8 +1384,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2619, - serialized_end=2707, + serialized_start=3058, + serialized_end=3249, ) @@ -1221,8 +1429,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2709, - serialized_end=2787, + serialized_start=3251, + serialized_end=3329, ) @@ -1259,8 +1467,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2789, - serialized_end=2884, + serialized_start=3331, + serialized_end=3426, ) @@ -1290,8 +1498,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2886, - serialized_end=2927, + 
serialized_start=3428, + serialized_end=3469, ) @@ -1338,8 +1546,8 @@ name='target', full_name='google.pubsub.v1.SeekRequest.target', index=0, containing_type=None, fields=[]), ], - serialized_start=2929, - serialized_end=3038, + serialized_start=3471, + serialized_end=3580, ) @@ -1362,17 +1570,23 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3040, - serialized_end=3054, + serialized_start=3582, + serialized_end=3596, ) +_TOPIC_LABELSENTRY.containing_type = _TOPIC +_TOPIC.fields_by_name['labels'].message_type = _TOPIC_LABELSENTRY _PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE _PUBSUBMESSAGE.fields_by_name['attributes'].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY _PUBSUBMESSAGE.fields_by_name['publish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_UPDATETOPICREQUEST.fields_by_name['topic'].message_type = _TOPIC +_UPDATETOPICREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK _PUBLISHREQUEST.fields_by_name['messages'].message_type = _PUBSUBMESSAGE _LISTTOPICSRESPONSE.fields_by_name['topics'].message_type = _TOPIC +_SUBSCRIPTION_LABELSENTRY.containing_type = _SUBSCRIPTION _SUBSCRIPTION.fields_by_name['push_config'].message_type = _PUSHCONFIG _SUBSCRIPTION.fields_by_name['message_retention_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_SUBSCRIPTION.fields_by_name['labels'].message_type = _SUBSCRIPTION_LABELSENTRY _PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG _PUSHCONFIG.fields_by_name['attributes'].message_type = _PUSHCONFIG_ATTRIBUTESENTRY _RECEIVEDMESSAGE.fields_by_name['message'].message_type = _PUBSUBMESSAGE @@ -1382,7 +1596,11 @@ _MODIFYPUSHCONFIGREQUEST.fields_by_name['push_config'].message_type = _PUSHCONFIG _PULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE _STREAMINGPULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE 
+_UPDATESNAPSHOTREQUEST.fields_by_name['snapshot'].message_type = _SNAPSHOT +_UPDATESNAPSHOTREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_SNAPSHOT_LABELSENTRY.containing_type = _SNAPSHOT _SNAPSHOT.fields_by_name['expire_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SNAPSHOT.fields_by_name['labels'].message_type = _SNAPSHOT_LABELSENTRY _LISTSNAPSHOTSRESPONSE.fields_by_name['snapshots'].message_type = _SNAPSHOT _SEEKREQUEST.fields_by_name['time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _SEEKREQUEST.oneofs_by_name['target'].fields.append( @@ -1394,6 +1612,7 @@ DESCRIPTOR.message_types_by_name['Topic'] = _TOPIC DESCRIPTOR.message_types_by_name['PubsubMessage'] = _PUBSUBMESSAGE DESCRIPTOR.message_types_by_name['GetTopicRequest'] = _GETTOPICREQUEST +DESCRIPTOR.message_types_by_name['UpdateTopicRequest'] = _UPDATETOPICREQUEST DESCRIPTOR.message_types_by_name['PublishRequest'] = _PUBLISHREQUEST DESCRIPTOR.message_types_by_name['PublishResponse'] = _PUBLISHRESPONSE DESCRIPTOR.message_types_by_name['ListTopicsRequest'] = _LISTTOPICSREQUEST @@ -1417,6 +1636,7 @@ DESCRIPTOR.message_types_by_name['StreamingPullRequest'] = _STREAMINGPULLREQUEST DESCRIPTOR.message_types_by_name['StreamingPullResponse'] = _STREAMINGPULLRESPONSE DESCRIPTOR.message_types_by_name['CreateSnapshotRequest'] = _CREATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['UpdateSnapshotRequest'] = _UPDATESNAPSHOTREQUEST DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT DESCRIPTOR.message_types_by_name['ListSnapshotsRequest'] = _LISTSNAPSHOTSREQUEST DESCRIPTOR.message_types_by_name['ListSnapshotsResponse'] = _LISTSNAPSHOTSRESPONSE @@ -1425,11 +1645,35 @@ DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', 
(_message.Message,), dict( + DESCRIPTOR = _TOPIC_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) + )) + , DESCRIPTOR = _TOPIC, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A topic resource. + + + Attributes: + name: + The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` must + start with a letter, and contain only letters (``[A-Za-z]``), + numbers (``[0-9]``), dashes (``-``), underscores (``_``), + periods (``.``), tildes (``~``), plus (``+``) or percent signs + (``%``). It must be between 3 and 255 characters in length, + and it must not start with ``"goog"``. + labels: + User labels. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) )) _sym_db.RegisterMessage(Topic) +_sym_db.RegisterMessage(Topic.LabelsEntry) PubsubMessage = _reflection.GeneratedProtocolMessageType('PubsubMessage', (_message.Message,), dict( @@ -1441,6 +1685,28 @@ , DESCRIPTOR = _PUBSUBMESSAGE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A message data and its attributes. The message payload must not be + empty; it must contain either a non-empty data field, or at least one + attribute. + + + Attributes: + data: + The message payload. + attributes: + Optional attributes for this message. + message_id: + ID of this message, assigned by the server when the message is + published. Guaranteed to be unique within the topic. This + value may be read by a subscriber that receives a + ``PubsubMessage`` via a ``Pull`` call or a push delivery. It + must not be populated by the publisher in a ``Publish`` call. + publish_time: + The time at which the message was published, populated by the + server when it receives the ``Publish`` call. It must not be + populated by the publisher in a ``Publish`` call. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) )) _sym_db.RegisterMessage(PubsubMessage) @@ -1449,13 +1715,51 @@ GetTopicRequest = _reflection.GeneratedProtocolMessageType('GetTopicRequest', (_message.Message,), dict( DESCRIPTOR = _GETTOPICREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the GetTopic method. + + + Attributes: + topic: + The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) )) _sym_db.RegisterMessage(GetTopicRequest) +UpdateTopicRequest = _reflection.GeneratedProtocolMessageType('UpdateTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATETOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateTopic method. + + + Attributes: + topic: + The topic to update. + update_mask: + Indicates which fields in the provided topic to update. Must + be specified and non-empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) + )) +_sym_db.RegisterMessage(UpdateTopicRequest) + PublishRequest = _reflection.GeneratedProtocolMessageType('PublishRequest', (_message.Message,), dict( DESCRIPTOR = _PUBLISHREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the Publish method. + + + Attributes: + topic: + The messages in the request will be published on this topic. + Format is ``projects/{project}/topics/{topic}``. + messages: + The messages to publish. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) )) _sym_db.RegisterMessage(PublishRequest) @@ -1463,6 +1767,16 @@ PublishResponse = _reflection.GeneratedProtocolMessageType('PublishResponse', (_message.Message,), dict( DESCRIPTOR = _PUBLISHRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``Publish`` method. 
+ + + Attributes: + message_ids: + The server-assigned ID of each published message, in the same + order as the messages in the request. IDs are guaranteed to be + unique within the topic. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) )) _sym_db.RegisterMessage(PublishResponse) @@ -1470,6 +1784,22 @@ ListTopicsRequest = _reflection.GeneratedProtocolMessageType('ListTopicsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopics`` method. + + + Attributes: + project: + The name of the cloud project that topics belong to. Format is + ``projects/{project}``. + page_size: + Maximum number of topics to return. + page_token: + The value returned by the last ``ListTopicsResponse``; + indicates that this is a continuation of a prior + ``ListTopics`` call, and that the system should return the + next page of data. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) )) _sym_db.RegisterMessage(ListTopicsRequest) @@ -1477,6 +1807,18 @@ ListTopicsResponse = _reflection.GeneratedProtocolMessageType('ListTopicsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopics`` method. + + + Attributes: + topics: + The resulting topics. + next_page_token: + If not empty, indicates that there may be more topics that + match the request; this value should be passed in a new + ``ListTopicsRequest``. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) )) _sym_db.RegisterMessage(ListTopicsResponse) @@ -1484,6 +1826,22 @@ ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopicSubscriptions`` method. + + + Attributes: + topic: + The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size: + Maximum number of subscription names to return. + page_token: + The value returned by the last + ``ListTopicSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListTopicSubscriptions`` call, and + that the system should return the next page of data. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) )) _sym_db.RegisterMessage(ListTopicSubscriptionsRequest) @@ -1491,6 +1849,18 @@ ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopicSubscriptions`` method. + + + Attributes: + subscriptions: + The names of the subscriptions that match the request. + next_page_token: + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListTopicSubscriptionsRequest`` to get more subscriptions. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) )) _sym_db.RegisterMessage(ListTopicSubscriptionsResponse) @@ -1498,16 +1868,88 @@ DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( DESCRIPTOR = _DELETETOPICREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``DeleteTopic`` method. + + + Attributes: + topic: + Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) )) _sym_db.RegisterMessage(DeleteTopicRequest) Subscription = _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIPTION_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) + )) + , DESCRIPTOR = _SUBSCRIPTION, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A subscription resource. + + + Attributes: + name: + The name of the subscription. It must have the format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain only + letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus + (``+``) or percent signs (``%``). It must be between 3 and 255 + characters in length, and it must not start with ``"goog"``. + topic: + The name of the topic from which this subscription is + receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this field + will be ``_deleted-topic_`` if the topic has been deleted. + push_config: + If push delivery is used with this subscription, this field is + used to configure it. 
An empty ``pushConfig`` signifies that + the subscriber will pull and ack messages using API methods. + ack_deadline_seconds: + This value is the maximum time after a subscriber receives a + message before the subscriber should acknowledge the message. + After message delivery but before the ack deadline expires and + before the message is acknowledged, it is an outstanding + message and will not be delivered again during that time (on a + best-effort basis). For pull subscriptions, this value is + used as the initial value for the ack deadline. To override + this value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using pull. The minimum + custom deadline you can specify is 10 seconds. The maximum + custom deadline you can specify is 600 seconds (10 minutes). + If this parameter is 0, a default value of 10 seconds is used. + For push delivery, this value is also used to set the request + timeout for the call to the push endpoint. If the subscriber + never acknowledges the message, the Pub/Sub system will + eventually redeliver the message. + retain_acked_messages: + Indicates whether to retain acknowledged messages. If true, + then messages are not expunged from the subscription's + backlog, even if they are acknowledged, until they fall out of + the ``message_retention_duration`` window. + message_retention_duration: + How long to retain unacknowledged messages in the + subscription's backlog, from the moment a message is + published. If ``retain_acked_messages`` is true, then this + also configures the retention of acknowledged messages, and + thus configures how far back in time a ``Seek`` can be done. + Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. + labels: + User labels. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) )) _sym_db.RegisterMessage(Subscription) +_sym_db.RegisterMessage(Subscription.LabelsEntry) PushConfig = _reflection.GeneratedProtocolMessageType('PushConfig', (_message.Message,), dict( @@ -1519,6 +1961,35 @@ , DESCRIPTOR = _PUSHCONFIG, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Configuration for a push delivery endpoint. + + + Attributes: + push_endpoint: + A URL locating the endpoint to which messages should be + pushed. For example, a Webhook endpoint might use + "https://example.com/push". + attributes: + Endpoint configuration attributes. Every endpoint has a set + of API supported attributes that can be used to control + different aspects of the message delivery. The currently + supported attribute is ``x-goog-version``, which you can use + to change the format of the pushed message. This attribute + indicates the version of the data expected by the endpoint. + This controls the shape of the pushed message (i.e., its + fields and metadata). The endpoint version is based on the + version of the Pub/Sub API. If not present during the + ``CreateSubscription`` call, it will default to the version of + the API used to make such call. If not present during a + ``ModifyPushConfig`` call, its value will not be changed. + ``GetSubscription`` calls will always return a valid version, + even if the subscription was created without this attribute. + The possible values for this attribute are: - ``v1beta1``: + uses the push format defined in the v1beta1 Pub/Sub API. - + ``v1`` or ``v1beta2``: uses the push format defined in the v1 + Pub/Sub API. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) )) _sym_db.RegisterMessage(PushConfig) @@ -1527,6 +1998,16 @@ ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), dict( DESCRIPTOR = _RECEIVEDMESSAGE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A message and its corresponding acknowledgment ID. + + + Attributes: + ack_id: + This ID can be used to acknowledge the received message. + message: + The message. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) )) _sym_db.RegisterMessage(ReceivedMessage) @@ -1534,6 +2015,15 @@ GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType('GetSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _GETSUBSCRIPTIONREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the GetSubscription method. + + + Attributes: + subscription: + The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) )) _sym_db.RegisterMessage(GetSubscriptionRequest) @@ -1541,6 +2031,17 @@ UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType('UpdateSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _UPDATESUBSCRIPTIONREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateSubscription method. + + + Attributes: + subscription: + The updated subscription object. + update_mask: + Indicates which fields in the provided subscription to update. + Must be specified and non-empty. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) )) _sym_db.RegisterMessage(UpdateSubscriptionRequest) @@ -1548,6 +2049,22 @@ ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListSubscriptionsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTSUBSCRIPTIONSREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListSubscriptions`` method. + + + Attributes: + project: + The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size: + Maximum number of subscriptions to return. + page_token: + The value returned by the last ``ListSubscriptionsResponse``; + indicates that this is a continuation of a prior + ``ListSubscriptions`` call, and that the system should return + the next page of data. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) )) _sym_db.RegisterMessage(ListSubscriptionsRequest) @@ -1555,6 +2072,18 @@ ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListSubscriptionsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTSUBSCRIPTIONSRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListSubscriptions`` method. + + + Attributes: + subscriptions: + The subscriptions that match the request. + next_page_token: + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListSubscriptionsRequest`` to get more subscriptions. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) )) _sym_db.RegisterMessage(ListSubscriptionsResponse) @@ -1562,6 +2091,15 @@ DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType('DeleteSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _DELETESUBSCRIPTIONREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the DeleteSubscription method. + + + Attributes: + subscription: + The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) )) _sym_db.RegisterMessage(DeleteSubscriptionRequest) @@ -1569,6 +2107,21 @@ ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyPushConfigRequest', (_message.Message,), dict( DESCRIPTOR = _MODIFYPUSHCONFIGREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ModifyPushConfig method. + + + Attributes: + subscription: + The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + push_config: + The push configuration for future deliveries. An empty + ``pushConfig`` indicates that the Pub/Sub system should stop + pushing messages from the given subscription and allow + messages to be pulled and acknowledged - effectively pausing + the subscription if ``Pull`` is not called. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) )) _sym_db.RegisterMessage(ModifyPushConfigRequest) @@ -1576,6 +2129,26 @@ PullRequest = _reflection.GeneratedProtocolMessageType('PullRequest', (_message.Message,), dict( DESCRIPTOR = _PULLREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``Pull`` method. + + + Attributes: + subscription: + The subscription from which messages should be pulled. Format + is ``projects/{project}/subscriptions/{sub}``. 
+ return_immediately: + If this field set to true, the system will respond immediately + even if it there are no messages available to return in the + ``Pull`` response. Otherwise, the system may wait (for a + bounded amount of time) until at least one message is + available, rather than returning no messages. The client may + cancel the request if it does not wish to wait any longer for + the response. + max_messages: + The maximum number of messages returned for this request. The + Pub/Sub system may return fewer than the number specified. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) )) _sym_db.RegisterMessage(PullRequest) @@ -1583,6 +2156,18 @@ PullResponse = _reflection.GeneratedProtocolMessageType('PullResponse', (_message.Message,), dict( DESCRIPTOR = _PULLRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``Pull`` method. + + + Attributes: + received_messages: + Received Pub/Sub messages. The Pub/Sub system will return zero + messages if there are no more available in the backlog. The + Pub/Sub system may return fewer than the ``maxMessages`` + requested even if there are more messages available in the + backlog. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) )) _sym_db.RegisterMessage(PullResponse) @@ -1590,6 +2175,26 @@ ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType('ModifyAckDeadlineRequest', (_message.Message,), dict( DESCRIPTOR = _MODIFYACKDEADLINEREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ModifyAckDeadline method. + + + Attributes: + subscription: + The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids: + List of acknowledgment IDs. + ack_deadline_seconds: + The new ack deadline with respect to the time this request was + sent to the Pub/Sub system. 
For example, if the value is 10, + the new ack deadline will expire 10 seconds after the + ``ModifyAckDeadline`` call was made. Specifying zero may + immediately make the message available for another pull + request. The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 + minutes). + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) )) _sym_db.RegisterMessage(ModifyAckDeadlineRequest) @@ -1597,6 +2202,19 @@ AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), dict( DESCRIPTOR = _ACKNOWLEDGEREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the Acknowledge method. + + + Attributes: + subscription: + The subscription whose message is being acknowledged. Format + is ``projects/{project}/subscriptions/{sub}``. + ack_ids: + The acknowledgment ID for the messages being acknowledged that + was returned by the Pub/Sub system in the ``Pull`` response. + Must not be empty. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) )) _sym_db.RegisterMessage(AcknowledgeRequest) @@ -1604,6 +2222,55 @@ StreamingPullRequest = _reflection.GeneratedProtocolMessageType('StreamingPullRequest', (_message.Message,), dict( DESCRIPTOR = _STREAMINGPULLREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``StreamingPull`` streaming RPC method. This request is + used to establish the initial stream as well as to stream + acknowledgements and ack deadline modifications from the client to the + server. + + + Attributes: + subscription: + The subscription for which to initialize the new stream. This + must be provided in the first request on the stream, and must + not be set in subsequent requests from client to server. + Format is ``projects/{project}/subscriptions/{sub}``. 
+ ack_ids: + List of acknowledgement IDs for acknowledging previously + received messages (received on this stream or a different + stream). If an ack ID has expired, the corresponding message + may be redelivered later. Acknowledging a message more than + once will not result in an error. If the acknowledgement ID is + malformed, the stream will be aborted with status + ``INVALID_ARGUMENT``. + modify_deadline_seconds: + The list of new ack deadlines for the IDs listed in + ``modify_deadline_ack_ids``. The size of this list must be the + same as the size of ``modify_deadline_ack_ids``. If it differs + the stream will be aborted with ``INVALID_ARGUMENT``. Each + element in this list is applied to the element in the same + position in ``modify_deadline_ack_ids``. The new ack deadline + is with respect to the time this request was sent to the + Pub/Sub system. Must be >= 0. For example, if the value is 10, + the new ack deadline will expire 10 seconds after this request + is received. If the value is 0, the message is immediately + made available for another streaming or non-streaming pull + request. If the value is < 0 (an error), the stream will be + aborted with status ``INVALID_ARGUMENT``. + modify_deadline_ack_ids: + List of acknowledgement IDs whose deadline will be modified + based on the corresponding element in + ``modify_deadline_seconds``. This field can be used to + indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if + the processing was interrupted. + stream_ack_deadline_seconds: + The ack deadline to use for the stream. This must be provided + in the first request on the stream, but it can also be updated + on subsequent requests from client to server. The minimum + deadline you can specify is 10 seconds. The maximum deadline + you can specify is 600 seconds (10 minutes). 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) )) _sym_db.RegisterMessage(StreamingPullRequest) @@ -1611,6 +2278,15 @@ StreamingPullResponse = _reflection.GeneratedProtocolMessageType('StreamingPullResponse', (_message.Message,), dict( DESCRIPTOR = _STREAMINGPULLRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``StreamingPull`` method. This response is used to + stream messages from the server to the client. + + + Attributes: + received_messages: + Received Pub/Sub messages. This will not be empty. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) )) _sym_db.RegisterMessage(StreamingPullResponse) @@ -1618,20 +2294,109 @@ CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict( DESCRIPTOR = _CREATESNAPSHOTREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``CreateSnapshot`` method. + + + Attributes: + name: + Optional user-provided name for this snapshot. If the name is + not provided in the request, the server will assign a random + name for this snapshot on the same project as the + subscription. Note that for REST API requests, you must + specify a name. Format is + ``projects/{project}/snapshots/{snap}``. + subscription: + The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: + (a) The existing backlog on the subscription. More precisely, + this is defined as the messages in the subscription's backlog + that are unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the successful + completion of the CreateSnapshot request. Format is + ``projects/{project}/subscriptions/{sub}``. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) )) _sym_db.RegisterMessage(CreateSnapshotRequest) +UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType('UpdateSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateSnapshot method. + + + Attributes: + snapshot: + The updated snpashot object. + update_mask: + Indicates which fields in the provided snapshot to update. + Must be specified and non-empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) + )) +_sym_db.RegisterMessage(UpdateSnapshotRequest) + Snapshot = _reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _SNAPSHOT_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) + )) + , DESCRIPTOR = _SNAPSHOT, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A snapshot resource. + + + Attributes: + name: + The name of the snapshot. + topic: + The name of the topic from which this snapshot is retaining + messages. + expire_time: + The snapshot is guaranteed to exist up until this time. A + newly-created snapshot expires no later than 7 days from the + time of its creation. Its exact lifetime is determined at + creation by the existing backlog in the source subscription. + Specifically, the lifetime of the snapshot is ``7 days - (age + of oldest unacked message in the subscription)``. For example, + consider a subscription whose oldest unacked message is 3 days + old. If a snapshot is created from this subscription, the + snapshot -- which will always capture this 3-day-old backlog + as long as the snapshot exists -- will expire in 4 days. 
+ labels: + User labels. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) )) _sym_db.RegisterMessage(Snapshot) +_sym_db.RegisterMessage(Snapshot.LabelsEntry) ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTSNAPSHOTSREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListSnapshots`` method. + + + Attributes: + project: + The name of the cloud project that snapshots belong to. Format + is ``projects/{project}``. + page_size: + Maximum number of snapshots to return. + page_token: + The value returned by the last ``ListSnapshotsResponse``; + indicates that this is a continuation of a prior + ``ListSnapshots`` call, and that the system should return the + next page of data. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) )) _sym_db.RegisterMessage(ListSnapshotsRequest) @@ -1639,6 +2404,18 @@ ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListSnapshotsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTSNAPSHOTSRESPONSE, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListSnapshots`` method. + + + Attributes: + snapshots: + The resulting snapshots. + next_page_token: + If not empty, indicates that there may be more snapshot that + match the request; this value should be passed in a new + ``ListSnapshotsRequest``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) )) _sym_db.RegisterMessage(ListSnapshotsResponse) @@ -1646,6 +2423,15 @@ DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType('DeleteSnapshotRequest', (_message.Message,), dict( DESCRIPTOR = _DELETESNAPSHOTREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``DeleteSnapshot`` method. + + + Attributes: + snapshot: + The name of the snapshot to delete. 
Format is + ``projects/{project}/snapshots/{snap}``. + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) )) _sym_db.RegisterMessage(DeleteSnapshotRequest) @@ -1653,6 +2439,31 @@ SeekRequest = _reflection.GeneratedProtocolMessageType('SeekRequest', (_message.Message,), dict( DESCRIPTOR = _SEEKREQUEST, __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``Seek`` method. + + + Attributes: + subscription: + The subscription to affect. + time: + The time to seek to. Messages retained in the subscription + that were published before this time are marked as + acknowledged, and messages retained in the subscription that + were published after this time are marked as unacknowledged. + Note that this operation affects only those messages retained + in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). + For example, if ``time`` corresponds to a point before the + message retention window (or to a point before the system's + notion of the subscription creation time), only retained + messages will be marked as unacknowledged, and already- + expunged messages will not be restored. + snapshot: + The snapshot to seek to. The snapshot's topic must be the same + as that of the provided subscription. Format is + ``projects/{project}/snapshots/{snap}``. 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) )) _sym_db.RegisterMessage(SeekRequest) @@ -1667,10 +2478,16 @@ DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1')) +_TOPIC_LABELSENTRY.has_options = True +_TOPIC_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True _PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SUBSCRIPTION_LABELSENTRY.has_options = True +_SUBSCRIPTION_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _PUSHCONFIG_ATTRIBUTESENTRY.has_options = True _PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SNAPSHOT_LABELSENTRY.has_options = True +_SNAPSHOT_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) try: # THESE ELEMENTS WILL BE DEPRECATED. # Please use the generated *_pb2_grpc.py files instead. @@ -1752,6 +2569,11 @@ def __init__(self, channel): request_serializer=CreateSnapshotRequest.SerializeToString, response_deserializer=Snapshot.FromString, ) + self.UpdateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=UpdateSnapshotRequest.SerializeToString, + response_deserializer=Snapshot.FromString, + ) self.DeleteSnapshot = channel.unary_unary( '/google.pubsub.v1.Subscriber/DeleteSnapshot', request_serializer=DeleteSnapshotRequest.SerializeToString, @@ -1795,6 +2617,10 @@ def GetSubscription(self, request, context): def UpdateSubscription(self, request, context): """Updates an existing subscription. 
Note that certain properties of a subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -1905,6 +2731,18 @@ def CreateSnapshot(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def DeleteSnapshot(self, request, context): """Removes an existing snapshot. All messages retained in the snapshot are immediately dropped. 
After a snapshot is deleted, a new one may be @@ -1986,6 +2824,11 @@ def add_SubscriberServicer_to_server(servicer, server): request_deserializer=CreateSnapshotRequest.FromString, response_serializer=Snapshot.SerializeToString, ), + 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + request_deserializer=UpdateSnapshotRequest.FromString, + response_serializer=Snapshot.SerializeToString, + ), 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( servicer.DeleteSnapshot, request_deserializer=DeleteSnapshotRequest.FromString, @@ -2018,6 +2861,11 @@ def __init__(self, channel): request_serializer=Topic.SerializeToString, response_deserializer=Topic.FromString, ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=UpdateTopicRequest.SerializeToString, + response_deserializer=Topic.FromString, + ) self.Publish = channel.unary_unary( '/google.pubsub.v1.Publisher/Publish', request_serializer=PublishRequest.SerializeToString, @@ -2057,6 +2905,18 @@ def CreateTopic(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Publish(self, request, context): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. 
The message payload must not be empty; it must contain @@ -2106,6 +2966,11 @@ def add_PublisherServicer_to_server(servicer, server): request_deserializer=Topic.FromString, response_serializer=Topic.SerializeToString, ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=UpdateTopicRequest.FromString, + response_serializer=Topic.SerializeToString, + ), 'Publish': grpc.unary_unary_rpc_method_handler( servicer.Publish, request_deserializer=PublishRequest.FromString, @@ -2166,6 +3031,10 @@ def GetSubscription(self, request, context): def UpdateSubscription(self, request, context): """Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) def ListSubscriptions(self, request, context): @@ -2246,6 +3115,15 @@ def CreateSnapshot(self, request, context): Note that for REST API requests, you must specify a name in the request. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) def DeleteSnapshot(self, request, context): """Removes an existing snapshot. All messages retained in the snapshot are immediately dropped. 
After a snapshot is deleted, a new one may be @@ -2291,6 +3169,10 @@ def GetSubscription(self, request, timeout, metadata=None, with_call=False, prot def UpdateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): """Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. """ raise NotImplementedError() UpdateSubscription.future = None @@ -2380,6 +3262,16 @@ def CreateSnapshot(self, request, timeout, metadata=None, with_call=False, proto """ raise NotImplementedError() CreateSnapshot.future = None + def UpdateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateSnapshot.future = None def DeleteSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): """Removes an existing snapshot. All messages retained in the snapshot are immediately dropped. 
After a snapshot is deleted, a new one may be @@ -2416,6 +3308,7 @@ def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_t ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.FromString, ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.FromString, ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.FromString, ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.FromString, } response_serializers = { @@ -2432,6 +3325,7 @@ def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_t ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.SerializeToString, ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.SerializeToString, ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.SerializeToString, ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.SerializeToString, } method_implementations = { @@ -2448,6 +3342,7 @@ def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_t ('google.pubsub.v1.Subscriber', 'Pull'): face_utilities.unary_unary_inline(servicer.Pull), ('google.pubsub.v1.Subscriber', 'Seek'): face_utilities.unary_unary_inline(servicer.Seek), ('google.pubsub.v1.Subscriber', 'StreamingPull'): face_utilities.stream_stream_inline(servicer.StreamingPull), + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): face_utilities.unary_unary_inline(servicer.UpdateSnapshot), ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): face_utilities.unary_unary_inline(servicer.UpdateSubscription), } server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) @@ -2474,6 
+3369,7 @@ def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, p ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.SerializeToString, ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.SerializeToString, ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.SerializeToString, ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.SerializeToString, } response_deserializers = { @@ -2490,6 +3386,7 @@ def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, p ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.FromString, ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.FromString, ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.FromString, ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.FromString, } cardinalities = { @@ -2506,6 +3403,7 @@ def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, p 'Pull': cardinality.Cardinality.UNARY_UNARY, 'Seek': cardinality.Cardinality.UNARY_UNARY, 'StreamingPull': cardinality.Cardinality.STREAM_STREAM, + 'UpdateSnapshot': cardinality.Cardinality.UNARY_UNARY, 'UpdateSubscription': cardinality.Cardinality.UNARY_UNARY, } stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) @@ -2525,6 +3423,15 @@ def CreateTopic(self, request, context): """Creates the given topic with the given name. """ context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. 
Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) def Publish(self, request, context): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The message payload must not be empty; it must contain @@ -2567,6 +3474,16 @@ def CreateTopic(self, request, timeout, metadata=None, with_call=False, protocol """ raise NotImplementedError() CreateTopic.future = None + def UpdateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateTopic.future = None def Publish(self, request, timeout, metadata=None, with_call=False, protocol_options=None): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. 
The message payload must not be empty; it must contain @@ -2613,6 +3530,7 @@ def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_ti ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.FromString, ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.FromString, ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.FromString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.FromString, } response_serializers = { ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, @@ -2621,6 +3539,7 @@ def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_ti ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.SerializeToString, ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.SerializeToString, ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.SerializeToString, } method_implementations = { ('google.pubsub.v1.Publisher', 'CreateTopic'): face_utilities.unary_unary_inline(servicer.CreateTopic), @@ -2629,6 +3548,7 @@ def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_ti ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): face_utilities.unary_unary_inline(servicer.ListTopicSubscriptions), ('google.pubsub.v1.Publisher', 'ListTopics'): face_utilities.unary_unary_inline(servicer.ListTopics), ('google.pubsub.v1.Publisher', 'Publish'): face_utilities.unary_unary_inline(servicer.Publish), + ('google.pubsub.v1.Publisher', 'UpdateTopic'): face_utilities.unary_unary_inline(servicer.UpdateTopic), } server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) return 
beta_implementations.server(method_implementations, options=server_options) @@ -2647,6 +3567,7 @@ def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, po ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.SerializeToString, ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.SerializeToString, ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.SerializeToString, } response_deserializers = { ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, @@ -2655,6 +3576,7 @@ def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, po ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.FromString, ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.FromString, ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.FromString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.FromString, } cardinalities = { 'CreateTopic': cardinality.Cardinality.UNARY_UNARY, @@ -2663,6 +3585,7 @@ def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, po 'ListTopicSubscriptions': cardinality.Cardinality.UNARY_UNARY, 'ListTopics': cardinality.Cardinality.UNARY_UNARY, 'Publish': cardinality.Cardinality.UNARY_UNARY, + 'UpdateTopic': cardinality.Cardinality.UNARY_UNARY, } stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Publisher', cardinalities, options=stub_options) diff --git a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py index 5a970cbc77ab..06dd470470d8 100644 --- 
a/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py +++ b/pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py @@ -76,6 +76,11 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, ) + self.UpdateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + ) self.DeleteSnapshot = channel.unary_unary( '/google.pubsub.v1.Subscriber/DeleteSnapshot', request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, @@ -119,6 +124,10 @@ def GetSubscription(self, request, context): def UpdateSubscription(self, request, context): """Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -229,6 +238,18 @@ def CreateSnapshot(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. 
See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def DeleteSnapshot(self, request, context): """Removes an existing snapshot. All messages retained in the snapshot are immediately dropped. After a snapshot is deleted, a new one may be @@ -310,6 +331,11 @@ def add_SubscriberServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.FromString, response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, ), + 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + ), 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( servicer.DeleteSnapshot, request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, @@ -342,6 +368,11 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) self.Publish = channel.unary_unary( '/google.pubsub.v1.Publisher/Publish', 
request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.SerializeToString, @@ -381,6 +412,18 @@ def CreateTopic(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Publish(self, request, context): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. The message payload must not be empty; it must contain @@ -430,6 +473,11 @@ def add_PublisherServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), 'Publish': grpc.unary_unary_rpc_method_handler( servicer.Publish, request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.FromString, diff --git a/pubsub/google/cloud/pubsub.py b/pubsub/google/cloud/pubsub.py new file mode 100644 index 000000000000..bf094f6cf03a --- /dev/null +++ b/pubsub/google/cloud/pubsub.py @@ -0,0 
+1,26 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import PublisherClient +from google.cloud.pubsub_v1 import SubscriberClient +from google.cloud.pubsub_v1 import types + + +__all__ = ( + 'PublisherClient', + 'SubscriberClient', + 'types', +) diff --git a/pubsub/google/cloud/pubsub_v1/__init__.py b/pubsub/google/cloud/pubsub_v1/__init__.py new file mode 100644 index 000000000000..21706f6eee5e --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/__init__.py @@ -0,0 +1,25 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import Client as PublisherClient +from google.cloud.pubsub_v1.subscriber import Client as SubscriberClient + +__all__ = ( + 'PublisherClient', + 'SubscriberClient', + 'types', +) diff --git a/pubsub/google/cloud/pubsub_v1/_gapic.py b/pubsub/google/cloud/pubsub_v1/_gapic.py new file mode 100644 index 000000000000..79aac7de8941 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/_gapic.py @@ -0,0 +1,73 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import functools + + +def add_methods(source_class, blacklist=()): + """Add wrapped versions of the `api` member's methods to the class. + + Any methods passed in `blacklist` are not added. + Additionally, any methods explicitly defined on the wrapped class are + not added. + """ + def wrap(wrapped_fx): + """Wrap a GAPIC method; preserve its name and docstring.""" + # If this is a static or class method, then we need to *not* + # send self as the first argument. + # + # Similarly, for instance methods, we need to send self.api rather + # than self, since that is where the actual methods were declared. 
+ instance_method = True + self = getattr(wrapped_fx, '__self__', None) + if issubclass(type(self), type): + instance_method = False + + # Okay, we have figured out what kind of method this is; send + # down the correct wrapper function. + if instance_method: + fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) + return functools.wraps(wrapped_fx)(fx) + fx = lambda self, *a, **kw: wrapped_fx(*a, **kw) + return functools.wraps(wrapped_fx)(fx) + + def actual_decorator(cls): + # Reflectively iterate over most of the methods on the source class + # (the GAPIC) and make wrapped versions available on this client. + for name in dir(source_class): + # Ignore all private and magic methods. + if name.startswith('_'): + continue + + # Ignore anything on our blacklist. + if name in blacklist: + continue + + # Retrieve the attribute, and ignore it if it is not callable. + attr = getattr(source_class, name) + if not callable(attr): + continue + + # Add a wrapper method to this object. + fx = wrap(getattr(source_class, name)) + setattr(cls, name, fx) + + # Return the augmented class. + return cls + + # Simply return the actual decorator; this is returned from this method + # and actually used to decorate the class. + return actual_decorator diff --git a/pubsub/google/cloud/pubsub_v1/publisher/__init__.py b/pubsub/google/cloud/pubsub_v1/publisher/__init__.py new file mode 100644 index 000000000000..76d54649448f --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1.publisher.client import Client + + +__all__ = ( + 'Client', +) diff --git a/pubsub/tests/__init__.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py similarity index 100% rename from pubsub/tests/__init__.py rename to pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py new file mode 100644 index 000000000000..61eea2bb9ad5 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -0,0 +1,147 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import abc +import enum + +import six + + +@six.add_metaclass(abc.ABCMeta) +class Batch(object): + """The base batching class for Pub/Sub publishing. 
+ + Although the :class:`~.pubsub_v1.publisher.batch.thread.Batch` class, based + on :class:`threading.Thread`, is fine for most cases, advanced + users may need to implement something based on a different concurrency + model. + + This class defines the interface for the Batch implementation; + subclasses may be passed as the ``batch_class`` argument to + :class:`~.pubsub_v1.client.PublisherClient`. + + The batching behavior works like this: When the + :class:`~.pubsub_v1.publisher.client.Client` is asked to publish a new + message, it requires a batch. The client will see if there is an + already-opened batch for the given topic; if there is, then the message + is sent to that batch. If there is not, then a new batch is created + and the message put there. + + When a new batch is created, it automatically starts a timer counting + down to the maximum latency before the batch should commit. + Essentially, if enough time passes, the batch automatically commits + regardless of how much is in it. However, if either the message count or + size thresholds are encountered first, then the batch will commit early. + """ + def __len__(self): + """Return the number of messages currently in the batch.""" + return len(self.messages) + + @property + @abc.abstractmethod + def messages(self): + """Return the messages currently in the batch. + + Returns: + Sequence: The messages currently in the batch. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def size(self): + """Return the total size of all of the messages currently in the batch. + + Returns: + int: The total size of all of the messages currently + in the batch, in bytes. + """ + raise NotImplementedError + + @property + @abc.abstractmethod + def settings(self): + """Return the batch settings. + + Returns: + ~.pubsub_v1.types.BatchSettings: The batch settings. These are + considered immutable once the batch has been opened. 
+ """ + raise NotImplementedError + + @property + @abc.abstractmethod + def status(self): + """Return the status of this batch. + + Returns: + str: The status of this batch. All statuses are human-readable, + all-lowercase strings. The ones represented in the + :class:`BaseBatch.Status` enum are special, but other statuses + are permitted. + """ + raise NotImplementedError + + def will_accept(self, message): + """Return True if the batch is able to accept the message. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + bool: Whether this batch can accept the message. + """ + # If this batch is not accepting messages generally, return False. + if self.status != BatchStatus.ACCEPTING_MESSAGES: + return False + + # If this batch can not hold the message in question, return False. + if self.size + message.ByteSize() > self.settings.max_bytes: + return False + + # Okay, everything is good. + return True + + @abc.abstractmethod + def publish(self, message): + """Publish a single message. + + Add the given message to this object; this will cause it to be + published once the batch either has enough messages or a sufficient + period of time has elapsed. + + This method is called by :meth:`~.PublisherClient.publish`. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the + :class:`concurrent.futures.Future` interface. + """ + raise NotImplementedError + + +class BatchStatus(object): + """An enum-like class representing valid statuses for a batch. + + It is acceptable for a class to use a status that is not on this + class; this represents the list of statuses where the existing + library hooks in functionality. 
+ """ + ACCEPTING_MESSAGES = 'accepting messages' + ERROR = 'error' + SUCCESS = 'success' diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py new file mode 100644 index 000000000000..f5c08a76f315 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -0,0 +1,245 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import logging +import threading +import time + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher import futures +from google.cloud.pubsub_v1.publisher.batch import base + + +class Batch(base.Batch): + """A batch of messages. + + The batch is the internal group of messages which are either awaiting + publication or currently in-flight. + + A batch is automatically created by the PublisherClient when the first + message to be published is received; subsequent messages are added to + that batch until the process of actual publishing _starts_. + + Once this occurs, any new messages sent to :meth:`publish` open a new + batch. + + If you are using this library, you most likely do not need to instantiate + batch objects directly; they will be created for you. If you want to + change the actual batching settings, see the ``batching`` argument on + :class:`~.pubsub_v1.PublisherClient`. 
+ + Any properties or methods on this class which are not defined in + :class:`~.pubsub_v1.publisher.batch.BaseBatch` should be considered + implementation details. + + Args: + client (~.pubsub_v1.PublisherClient): The publisher client used to + create this batch. + topic (str): The topic. The format for this is + ``projects/{project}/topics/{topic}``. + settings (~.pubsub_v1.types.BatchSettings): The settings for batch + publishing. These should be considered immutable once the batch + has been opened. + autocommit (bool): Whether to autocommit the batch when the time + has elapsed. Defaults to True unless ``settings.max_latency`` is + inf. + """ + def __init__(self, client, topic, settings, autocommit=True): + self._client = client + + # These objects are all communicated between threads; ensure that + # any writes to them are atomic. + self._futures = [] + self._messages = [] + self._size = 0 + self._settings = settings + self._status = base.BatchStatus.ACCEPTING_MESSAGES + self._topic = topic + + # If max latency is specified, start a thread to monitor the batch and + # commit when the max latency is reached. + self._thread = None + self._commit_lock = threading.Lock() + if autocommit and self._settings.max_latency < float('inf'): + self._thread = threading.Thread(target=self.monitor) + self._thread.start() + + @property + def client(self): + """~.pubsub_v1.client.PublisherClient: A publisher client.""" + return self._client + + @property + def messages(self): + """Sequence: The messages currently in the batch.""" + return self._messages + + @property + def settings(self): + """Return the batch settings. + + Returns: + ~.pubsub_v1.types.BatchSettings: The batch settings. These are + considered immutable once the batch has been opened. + """ + return self._settings + + @property + def size(self): + """Return the total size of all of the messages currently in the batch. + + Returns: + int: The total size of all of the messages currently + in the batch, in bytes. 
+ """ + return self._size + + @property + def status(self): + """Return the status of this batch. + + Returns: + str: The status of this batch. All statuses are human-readable, + all-lowercase strings. + """ + return self._status + + def commit(self): + """Actually publish all of the messages on the active batch. + + This synchronously sets the batch status to in-flight, and then opens + a new thread, which handles actually sending the messages to Pub/Sub. + + .. note:: + + This method is non-blocking. It opens a new thread, which calls + :meth:`_commit`, which does block. + """ + # Set the status to in-flight synchronously, to ensure that + # this batch will necessarily not accept new messages. + # + # Yes, this is repeated in `_commit`, because that method is called + # directly by `monitor`. + self._status = 'in-flight' + + # Start a new thread to actually handle the commit. + commit_thread = threading.Thread(target=self._commit) + commit_thread.start() + + def _commit(self): + """Actually publish all of the messages on the active batch. + + This moves the batch out from being the active batch to an in-flight + batch on the publisher, and then the batch is discarded upon + completion. + + .. note:: + + This method blocks. The :meth:`commit` method is the non-blocking + version, which calls this one. + """ + with self._commit_lock: + # If, in the intervening period, the batch started to be committed, + # or completed a commit, then no-op at this point. + if self._status != base.BatchStatus.ACCEPTING_MESSAGES: + return + + # Update the status. + self._status = 'in-flight' + + # Sanity check: If there are no messages, no-op. + if not self._messages: + return + + # Begin the request to publish these messages. + # Log how long the underlying request takes. 
+ start = time.time() + response = self.client.api.publish( + self._topic, + self.messages, + ) + end = time.time() + logging.getLogger().debug('gRPC Publish took {s} seconds.'.format( + s=end - start, + )) + + # We got a response from Pub/Sub; denote that we are processing. + self._status = 'processing results' + + # Sanity check: If the number of message IDs is not equal to the + # number of futures I have, then something went wrong. + if len(response.message_ids) != len(self._futures): + for future in self._futures: + future.set_exception(exceptions.PublishError( + 'Some messages were not successfully published.', + )) + return + + # Iterate over the futures on the queue and return the response + # IDs. We are trusting that there is a 1:1 mapping, and raise an + # exception if not. + self._status = base.BatchStatus.SUCCESS + for message_id, future in zip(response.message_ids, self._futures): + future.set_result(message_id) + + def monitor(self): + """Commit this batch after sufficient time has elapsed. + + This simply sleeps for ``self._settings.max_latency`` seconds, + and then calls commit unless the batch has already been committed. + """ + # Note: This thread blocks; it is up to the calling code to call it + # in a separate thread. + # + # Sleep for however long we should be waiting. + time.sleep(self._settings.max_latency) + + # Commit. + return self._commit() + + def publish(self, message): + """Publish a single message. + + Add the given message to this object; this will cause it to be + published once the batch either has enough messages or a sufficient + period of time has elapsed. + + This method is called by :meth:`~.PublisherClient.publish`. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + ~.pubsub_v1.publisher.futures.Future: An object conforming to + the :class:`concurrent.futures.Future` interface. + """ + # Coerce the type, just in case. 
+ if not isinstance(message, types.PubsubMessage): + message = types.PubsubMessage(**message) + + # Add the size to the running total of the size, so we know + # if future messages need to be rejected. + self._size += message.ByteSize() + + # Store the actual message in the batch's message queue. + self._messages.append(message) + + # Return a Future. That future needs to be aware of the status + # of this batch. + f = futures.Future() + self._futures.append(f) + return f diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py new file mode 100644 index 000000000000..e80662a715ef --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -0,0 +1,161 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import copy +import pkg_resources +import threading + +import six + +from google.cloud.gapic.pubsub.v1 import publisher_client + +from google.cloud.pubsub_v1 import _gapic +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher.batch import thread + + +__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version + + +@_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) +class Client(object): + """A publisher client for Google Cloud Pub/Sub. + + This creates an object that is capable of publishing messages. 
+ Generally, you can instantiate this client with no arguments, and you + get sensible defaults. + + Args: + batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The + settings for batch publishing. + batch_class (class): A class that describes how to handle + batches. You may subclass the + :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in + order to define your own batcher. This is primarily provided to + allow use of different concurrency models; the default + is based on :class:`threading.Thread`. + kwargs (dict): Any additional arguments provided are sent as keyword + arguments to the underlying + :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. + Generally, you should not need to set additional keyword arguments. + """ + def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): + # Add the metrics headers, and instantiate the underlying GAPIC + # client. + kwargs['lib_name'] = 'gccl' + kwargs['lib_version'] = __VERSION__ + self.api = publisher_client.PublisherClient(**kwargs) + self.batch_settings = types.BatchSettings(*batch_settings) + + # The batches on the publisher client are responsible for holding + # messages. One batch exists for each topic. + self._batch_class = batch_class + self._batch_lock = threading.Lock() + self._batches = {} + + def batch(self, topic, message, create=True, autocommit=True): + """Return the current batch for the provided topic. + + This will create a new batch only if no batch currently exists. + + Args: + topic (str): A string representing the topic. + message (~google.cloud.pubsub_v1.types.PubsubMessage): The message + that will be committed. + create (bool): Whether to create a new batch if no batch is + found. Defaults to True. + autocommit (bool): Whether to autocommit this batch. + This is primarily useful for debugging. + + Returns: + ~.pubsub_v1.batch.Batch: The batch object. 
+ """ + # If there is no matching batch yet, then potentially create one + # and place it on the batches dictionary. + with self._batch_lock: + batch = self._batches.get(topic, None) + if batch is None or not batch.will_accept(message): + if not create: + return None + batch = self._batch_class( + autocommit=autocommit, + client=self, + settings=self.batch_settings, + topic=topic, + ) + self._batches[topic] = batch + + # Simply return the appropriate batch. + return batch + + def publish(self, topic, data, **attrs): + """Publish a single message. + + .. note:: + Messages in Pub/Sub are blobs of bytes. They are *binary* data, + not text. You must send data as a bytestring + (``bytes`` in Python 3; ``str`` in Python 2), and this library + will raise an exception if you send a text string. + + The reason that this is so important (and why we do not try to + coerce for you) is because Pub/Sub is also platform independent + and there is no way to know how to decode messages properly on + the other side; therefore, encoding and decoding is a required + exercise for the developer. + + Add the given message to this object; this will cause it to be + published once the batch either has enough messages or a sufficient + period of time has elapsed. + + Example: + >>> from google.cloud.pubsub_v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> data = b'The rain in Wales falls mainly on the snails.' + >>> response = client.publish(topic, data, username='guido') + + Args: + topic (str): The topic to publish messages to. + data (bytes): A bytestring representing the message body. This + must be a bytestring. + attrs (Mapping[str, str]): A dictionary of attributes to be + sent as metadata. (These may be text strings or byte strings.) + + Returns: + ~concurrent.futures.Future: An object conforming to the + ``concurrent.futures.Future`` interface. 
+ """ + # Sanity check: Is the data being sent as a bytestring? + # If it is literally anything else, complain loudly about it. + if not isinstance(data, six.binary_type): + raise TypeError('Data being published to Pub/Sub must be sent ' + 'as a bytestring.') + + # Coerce all attributes to text strings. + for k, v in copy.copy(attrs).items(): + if isinstance(v, six.text_type): + continue + if isinstance(v, six.binary_type): + attrs[k] = v.decode('utf-8') + continue + raise TypeError('All attributes being published to Pub/Sub must ' + 'be sent as text strings.') + + # Create the Pub/Sub message object. + message = types.PubsubMessage(data=data, attributes=attrs) + + # Delegate the publishing to the batch. + return self.batch(topic, message=message).publish(message) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py new file mode 100644 index 000000000000..bae090ceb9d7 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -0,0 +1,29 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from __future__ import absolute_import
+
+from concurrent.futures import TimeoutError
+
+from google.api.core.exceptions import GoogleAPICallError
+
+
+class PublishError(GoogleAPICallError):
+    pass
+
+
+__all__ = (
+    'PublishError',
+    'TimeoutError',
+)
diff --git a/pubsub/google/cloud/pubsub_v1/publisher/futures.py b/pubsub/google/cloud/pubsub_v1/publisher/futures.py
new file mode 100644
index 000000000000..cbc67d9e55c3
--- /dev/null
+++ b/pubsub/google/cloud/pubsub_v1/publisher/futures.py
@@ -0,0 +1,169 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+import threading
+
+import google.api.core.future
+from google.cloud.pubsub_v1.publisher import exceptions
+
+
+class Future(google.api.core.future.Future):
+    """Encapsulation of the asynchronous execution of an action.
+
+    This object is returned from asynchronous Pub/Sub calls, and is the
+    interface to determine the status of those calls.
+
+    This object should not be created directly, but is returned by other
+    methods in this library.
+    """
+    def __init__(self):
+        self._callbacks = []
+        self._result = None
+        self._exception = None
+        self._completed = threading.Event()
+
+    def cancel(self):
+        """Publishes in Pub/Sub currently may not be canceled.
+
+        This method always returns False.
+        """
+        return False
+
+    def cancelled(self):
+        """Publishes in Pub/Sub currently may not be canceled.
+ + This method always returns False. + """ + return False + + def running(self): + """Publishes in Pub/Sub currently may not be canceled. + + This method always returns True. + """ + return True + + def done(self): + """Return True if the publish has completed, False otherwise. + + This still returns True in failure cases; checking :meth:`result` or + :meth:`exception` is the canonical way to assess success or failure. + """ + return self._exception is not None or self._result is not None + + def result(self, timeout=None): + """Return the message ID, or raise an exception. + + This blocks until the message has successfully been published, and + returns the message ID. + + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Returns: + str: The message ID. + + Raises: + ~.pubsub_v1.TimeoutError: If the request times out. + Exception: For undefined exceptions in the underlying + call execution. + """ + # Attempt to get the exception if there is one. + # If there is not one, then we know everything worked, and we can + # return an appropriate value. + err = self.exception(timeout=timeout) + if err is None: + return self._result + raise err + + def exception(self, timeout=None, _wait=1): + """Return the exception raised by the call, if any. + + This blocks until the message has successfully been published, and + returns the exception. If the call succeeded, return None. + + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Raises: + TimeoutError: If the request times out. + + Returns: + Exception: The exception raised by the call, if any. + """ + # Wait until the future is done. + if not self._completed.wait(timeout=timeout): + raise exceptions.TimeoutError('Timed out waiting for result.') + + # If the batch completed successfully, this should return None. 
+ if self._result is not None: + return None + + # Okay, this batch had an error; this should return it. + return self._exception + + def add_done_callback(self, fn): + """Attach the provided callable to the future. + + The provided function is called, with this future as its only argument, + when the future finishes running. + """ + if self.done(): + fn(self) + self._callbacks.append(fn) + + def set_result(self, result): + """Set the result of the future to the provided result. + + Args: + result (str): The message ID. + """ + # Sanity check: A future can only complete once. + if self._result is not None or self._exception is not None: + raise RuntimeError('set_result can only be called once.') + + # Set the result and trigger the future. + self._result = result + self._trigger() + + def set_exception(self, exception): + """Set the result of the future to the given exception. + + Args: + exception (:exc:`Exception`): The exception raised. + """ + # Sanity check: A future can only complete once. + if self._result is not None or self._exception is not None: + raise RuntimeError('set_exception can only be called once.') + + # Set the exception and trigger the future. + self._exception = exception + self._trigger() + + def _trigger(self): + """Trigger all callbacks registered to this Future. + + This method is called internally by the batch once the batch + completes. + + Args: + message_id (str): The message ID, as a string. + """ + self._completed.set() + for callback in self._callbacks: + callback(self) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py new file mode 100644 index 000000000000..d98a7bb75be4 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1.subscriber.client import Client + + +__all__ = ( + 'Client', +) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py new file mode 100644 index 000000000000..9fb2567176bc --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -0,0 +1,267 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Bidirectional Streaming Consumer. + +The goal here is to consume a bidirectional streaming RPC by fanning out the +responses received from the server to be processed and fanning in requests from +the response processors to be sent to the server through the request stream. +This module is a framework to deal with this pattern in a consistent way: + + * A :class:`Consumer` manages scheduling requests to a stream and consuming + responses from a stream. 
The Consumer takes the responses and schedules
+      them to be processed in callbacks using any
+      :class:`~concurrent.futures.Executor`.
+    * A :class:`Policy` which determines how the consumer calls the RPC and
+      processes responses, errors, and messages.
+
+The :class:`Policy` is the only class that's intended to be sub-classed here.
+This would be implemented for every bidirectional streaming method.
+How does this work? The first part of the implementation, fanning out
+responses, is actually quite straightforward and can be done with just a
+:class:`concurrent.futures.Executor`:
+
+.. graphviz::
+    digraph responses_only {
+        "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"]
+        "gRPC Python" -> "Consumer" [label="responses", color="red"]
+        "Consumer" -> "Policy" [label="responses", color="red"]
+        "Policy" -> "futures.Executor" [label="response", color="red"]
+        "futures.Executor" -> "callback" [label="response", color="red"]
+    }
+
+The challenge comes from the fact that in bidirectional streaming two more
+things have to be done:
+
+    1. The consumer must maintain a long-running request generator.
+    2. The consumer must provide some way for the response processor to queue
+       new requests.
+
+These are especially important because in the case of Pub/Sub you are
+essentially streaming requests indefinitely and receiving responses
+indefinitely.
+
+For the first challenge, we take advantage of the fact that gRPC runs the
+request generator in its own thread. That thread can block, so we can use
+a queue for that:
+
+.. graphviz::
+    digraph response_flow {
+        "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"]
+        "gRPC Python" -> "Consumer" [label="responses", color="red"]
+        "Consumer" -> "request generator thread" [label="starts", color="gray"]
+        "request generator thread" -> "gRPC Python"
+            [label="requests", color="blue"]
+    }
+
+The final piece of the puzzle, allowing things from anywhere to queue new
+requests, is a bit more complex. 
If we were only dealing with threads, then the
+response workers could just directly interact with the policy/consumer to
+queue new requests:
+
+.. graphviz::
+    digraph thread_only_requests {
+        "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"]
+        "gRPC Python" -> "Consumer" [label="responses", color="red"]
+        "Consumer" -> "request generator thread" [label="starts", color="gray"]
+        "request generator thread" -> "gRPC Python"
+            [label="requests", color="blue"]
+        "Consumer" -> "Policy" [label="responses", color="red"]
+        "Policy" -> "futures.Executor" [label="response", color="red"]
+        "futures.Executor" -> "callback" [label="response", color="red"]
+        "callback" -> "Consumer" [label="send_request", color="blue"]
+    }
+
+But, because this does not dictate any particular concurrent strategy for
+dealing with the responses, it's possible that a response could be processed
+in a different thread, process, or even on a different machine. Because of
+this, we need an intermediary queue between the callbacks and the gRPC request
+queue to bridge the "concurrency gap". To pump items from the concurrency-safe
+queue into the gRPC request queue, we need another worker thread. Putting this
+all together looks like this:
+
+.. 
graphviz:: + digraph responses_only { + "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] + "gRPC Python" -> "Consumer" [label="responses", color="red"] + "Consumer" -> "request generator thread" [label="starts", color="gray"] + "Policy" -> "QueueCallbackThread" [label="starts", color="gray"] + "request generator thread" -> "gRPC Python" + [label="requests", color="blue"] + "Consumer" -> "Policy" [label="responses", color="red"] + "Policy" -> "futures.Executor" [label="response", color="red"] + "futures.Executor" -> "callback" [label="response", color="red"] + "callback" -> "callback_request_queue" [label="requests", color="blue"] + "callback_request_queue" -> "QueueCallbackThread" + [label="consumed by", color="blue"] + "QueueCallbackThread" -> "Consumer" + [label="send_response", color="blue"] + } + +This part is actually up to the Policy to enable. The consumer just provides a +thread-safe queue for requests. The :cls:`QueueCallbackThread` can be used by +the Policy implementation to spin up the worker thread to pump the +concurrency-safe queue. See the Pub/Sub subscriber implementation for an +example of this. +""" + +import logging +import queue +import threading + +from google.cloud.pubsub_v1.subscriber import _helper_threads + +_LOGGER = logging.getLogger(__name__) + + +class Consumer(object): + """Bi-directional streaming RPC consumer. + + This class coordinates the consumption of a bi-directional streaming RPC. + There is a bit of background information to know before understanding how + this class operates: + + 1. gRPC has its own background thread for dealing with I/O. + 2. gRPC consumes a streaming call's request generator in another + thread. + 3. If the request generator thread exits, gRPC will close the + connection. + + Because of (2) and (3), the consumer must always at least use threading + for some bookkeeping. No matter what, a thread will be created by gRPC to + generate requests. This thread is called the *request generator thread*. 
+    Having the request generator thread allows the consumer to hold the stream
+    open indefinitely. Now gRPC will send responses as fast as the consumer can
+    ask for them. The consumer hands these off to the :cls:`Policy` via
+    :meth:`Policy.on_response`, which should not block.
+
+    Finally, we do not want to block the main thread, so the consumer actually
+    invokes the RPC itself in a separate thread. This thread is called the
+    *response consumer helper thread*.
+
+    So all in all there are three threads:
+
+    1. gRPC's internal I/O thread.
+    2. The request generator thread, created by gRPC.
+    3. The response consumer helper thread, created by the Consumer.
+
+    In addition, the Consumer likely uses some sort of concurrency to prevent
+    blocking on processing responses. The Policy may also use another thread to
+    deal with pumping messages from an external queue into the request queue
+    here.
+
+    It may seem strange to use threads for something "high performance"
+    considering the GIL. However, the threads here are not CPU bound. They are
+    simple threads that are blocked by I/O and generally just move around some
+    simple objects between queues. The overhead for these helper threads is
+    low. The Consumer and end-user can configure any sort of executor they want
+    for the actual processing of the responses, which may be CPU intensive.
+    """
+    def __init__(self, policy):
+        """
+        Args:
+            policy (Policy): The consumer policy, which defines how
+                requests and responses are handled.
+        """
+        self._policy = policy
+        self._request_queue = queue.Queue()
+        self._exiting = threading.Event()
+
+        self.active = False
+        self.helper_threads = _helper_threads.HelperThreadRegistry()
+        """:cls:`_helper_threads.HelperThreads`: manages the helper threads.
+        The policy may use this to schedule its own helper threads.
+        """
+
+    def send_request(self, request):
+        """Queue a request to be sent to gRPC.
+
+        Args:
+            request (Any): The request protobuf. 
+ """ + self._request_queue.put(request) + + def _request_generator_thread(self): + """Generate requests for the stream. + + This blocks for new requests on the request queue and yields them to + gRPC. + """ + # First, yield the initial request. This occurs on every new + # connection, fundamentally including a resumed connection. + initial_request = self._policy.get_initial_request(ack_queue=True) + _LOGGER.debug('Sending initial request: {initial_request}'.format( + initial_request=initial_request, + )) + yield initial_request + + # Now yield each of the items on the request queue, and block if there + # are none. This can and must block to keep the stream open. + while True: + request = self._request_queue.get() + if request == _helper_threads.STOP: + _LOGGER.debug('Request generator signaled to stop.') + break + + _LOGGER.debug('Sending request: {}'.format(request)) + yield request + + def _blocking_consume(self): + """Consume the stream indefinitely.""" + while True: + # It is possible that a timeout can cause the stream to not + # exit cleanly when the user has called stop_consuming(). This + # checks to make sure we're not exiting before opening a new + # stream. + if self._exiting.is_set(): + _LOGGER.debug('Event signalled consumer exit.') + break + + request_generator = self._request_generator_thread() + response_generator = self._policy.call_rpc(request_generator) + try: + for response in response_generator: + _LOGGER.debug('Received response: {0}'.format(response)) + self._policy.on_response(response) + + # If the loop above exits without an exception, then the + # request stream terminated cleanly, which should only happen + # when it was signaled to do so by stop_consuming. In this + # case, break out of the while loop and exit this thread. 
+ _LOGGER.debug('Clean RPC loop exit signalled consumer exit.') + break + except KeyboardInterrupt: + self.stop_consuming() + except Exception as exc: + try: + self._policy.on_exception(exc) + except: + self.active = False + raise + + def start_consuming(self): + """Start consuming the stream.""" + self.active = True + self._exiting.clear() + self.helper_threads.start( + 'consume bidirectional stream', + self._request_queue, + self._blocking_consume, + ) + + def stop_consuming(self): + """Signal the stream to stop and block until it completes.""" + self.active = False + self._exiting.set() + self.helper_threads.stop_all() diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py new file mode 100644 index 000000000000..21e812a0d2ad --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -0,0 +1,129 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import logging +import threading +import uuid + +import six + +__all__ = ( + 'HelperThreadRegistry', + 'QueueCallbackThread', + 'STOP', +) + +_LOGGER = logging.getLogger(__name__) + +_HelperThread = collections.namedtuple( + 'HelperThreads', + ['name', 'thread', 'queue'], +) + + +# Helper thread stop indicator. 
This could be a sentinel object or None, +# but the sentinel object's ID can change if the process is forked, and +# None has the possibility of a user accidentally killing the helper +# thread. +STOP = uuid.uuid4() + + +class HelperThreadRegistry(object): + def __init__(self): + self._helper_threads = {} + + def __contains__(self, needle): + return needle in self._helper_threads + + def start(self, name, queue, target, *args, **kwargs): + """Create and start a helper thread. + + Args: + name (str): The name of the helper thread. + queue (Queue): A concurrency-safe queue. + target (Callable): The target of the thread. + args: Additional args passed to the thread constructor. + kwargs: Additional kwargs passed to the thread constructor. + + Returns: + threading.Thread: The created thread. + """ + # Create and start the helper thread. + thread = threading.Thread( + name='Consumer helper: {}'.format(name), + target=target, + *args, **kwargs + ) + thread.daemon = True + thread.start() + + # Keep track of the helper thread, so we are able to stop it. + self._helper_threads[name] = _HelperThread(name, thread, queue) + _LOGGER.debug('Started helper thread {}'.format(name)) + return thread + + def stop(self, name): + """Stops a helper thread. + + Sends the stop message and blocks until the thread joins. + + Args: + name (str): The name of the thread. + """ + # Attempt to retrieve the thread; if it is gone already, no-op. + helper_thread = self._helper_threads.get(name) + if helper_thread is None: + return + + # Join the thread if it is still alive. + if helper_thread.thread.is_alive(): + _LOGGER.debug('Stopping helper thread {}'.format(name)) + helper_thread.queue.put(STOP) + helper_thread.thread.join() + + # Remove the thread from our tracking. + self._helper_threads.pop(name, None) + + def stop_all(self): + """Stop all helper threads.""" + # This could be more efficient by sending the stop signal to all + # threads before joining any of them. 
+ for name in list(six.iterkeys(self._helper_threads)): + self.stop(name) + + +class QueueCallbackThread(object): + """A helper thread that executes a callback for every item in + the queue. + """ + def __init__(self, queue, callback): + self.queue = queue + self._callback = callback + + def __call__(self): + while True: + item = self.queue.get() + if item == STOP: + break + + # Run the callback. If any exceptions occur, log them and + # continue. + try: + self._callback(item) + except Exception as exc: + _LOGGER.error('{class_}: {message}'.format( + class_=exc.__class__.__name__, + message=str(exc), + )) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py b/pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py new file mode 100644 index 000000000000..09f047495896 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py @@ -0,0 +1,155 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division + + +class Histogram(object): + """Representation of a single histogram. + + The purpose of this class is to store actual ack timing information + in order to predict how long to renew leases. + + The default implementation uses the 99th percentile of previous ack + times to implicitly lease messages; however, custom + :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer` subclasses + are free to use a different formula. 
+ + The precision of data stored is to the nearest integer. Additionally, + values outside the range of ``10 <= x <= 600`` are stored as ``10`` or + ``600``, since these are the boundaries of leases in the actual API. + """ + def __init__(self, data=None): + """Instantiate the histogram. + + Args: + data (Mapping[str, int]): The data strucure to be used to store + the underlying data. The default is an empty dictionary. + This can be set to a dictionary-like object if required + (for example, if a special object is needed for + concurrency reasons). + """ + # The data is stored as a dictionary, with the keys being the + # value being added and the values being the number of times that + # value was added to the dictionary. + # + # This is depending on the Python interpreter's implicit ordering + # of dictionaries, which is a bitwise sort by the key's ``hash()`` + # value. Because ``hash(int i) -> i`` and all of our keys are + # positive integers (negatives would be a problem because the sort + # is bitwise), we can rely on this. + if data is None: + data = {} + self._data = data + self._len = 0 + + def __len__(self): + """Return the total number of data points in this histogram. + + This is cached on a separate counter (rather than computing it using + ``sum([v for v in self._data.values()])``) to optimize lookup. + + Returns: + int: The total number of data points in this histogram. + """ + return self._len + + def __contains__(self, needle): + """Return True if needle is present in the histogram, False otherwise. + + Returns: + bool: True or False + """ + return needle in self._data + + def __repr__(self): + return ''.format( + len=len(self), + max=self.max, + min=self.min, + ) + + @property + def max(self): + """Return the maximum value in this histogram. + + If there are no values in the histogram at all, return 600. + + Returns: + int: The maximum value in the histogram. 
+ """ + if len(self._data) == 0: + return 600 + return next(iter(reversed(sorted(self._data.keys())))) + + @property + def min(self): + """Return the minimum value in this histogram. + + If there are no values in the histogram at all, return 10. + + Returns: + int: The minimum value in the histogram. + """ + if len(self._data) == 0: + return 10 + return next(iter(sorted(self._data.keys()))) + + def add(self, value): + """Add the value to this histogram. + + Args: + value (int): The value. Values outside of ``10 <= x <= 600`` + will be raised to ``10`` or reduced to ``600``. + """ + # If the value is out of bounds, bring it in bounds. + value = int(value) + if value < 10: + value = 10 + if value > 600: + value = 600 + + # Add the value to the histogram's data dictionary. + self._data.setdefault(value, 0) + self._data[value] += 1 + self._len += 1 + + def percentile(self, percent): + """Return the value that is the Nth precentile in the histogram. + + Args: + percent (Union[int, float]): The precentile being sought. The + default consumer implementations use consistently use ``99``. + + Returns: + int: The value corresponding to the requested percentile. + """ + # Sanity check: Any value over 100 should become 100. + if percent >= 100: + percent = 100 + + # Determine the actual target number. + target = len(self) - len(self) * (percent / 100) + + # Iterate over the values in reverse, dropping the target by the + # number of times each value has been seen. When the target passes + # 0, return the value we are currently viewing. + for k in reversed(sorted(self._data.keys())): + target -= self._data[k] + if target < 0: + return k + + # The only way to get here is if there was no data. + # In this case, just return 10 seconds. 
+ return 10 diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py new file mode 100644 index 000000000000..afb9f7d7ca75 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -0,0 +1,98 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import pkg_resources + +from google.cloud.gapic.pubsub.v1 import subscriber_client + +from google.cloud.pubsub_v1 import _gapic +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber.policy import thread + + +__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version + + +@_gapic.add_methods(subscriber_client.SubscriberClient, + blacklist=('pull', 'streaming_pull')) +class Client(object): + """A subscriber client for Google Cloud Pub/Sub. + + This creates an object that is capable of subscribing to messages. + Generally, you can instantiate this client with no arguments, and you + get sensible defaults. + + Args: + policy_class (class): A class that describes how to handle + subscriptions. You may subclass the + :class:`.pubsub_v1.subscriber.policy.base.BasePolicy` + class in order to define your own consumer. This is primarily + provided to allow use of different concurrency models; the default + is based on :class:`threading.Thread`. 
+        kwargs (dict): Any additional arguments provided are sent as
+            keyword arguments to the underlying
+            :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`.
+            Generally, you should not need to set additional keyword
+            arguments.
+    """
+    def __init__(self, policy_class=thread.Policy, **kwargs):
+        # Add the metrics headers, and instantiate the underlying GAPIC
+        # client.
+        kwargs['lib_name'] = 'gccl'
+        kwargs['lib_version'] = __VERSION__
+        self.api = subscriber_client.SubscriberClient(**kwargs)
+
+        # The subscription class is responsible for retrieving and dispatching
+        # messages.
+        self._policy_class = policy_class
+
+    def subscribe(self, subscription, callback=None, flow_control=()):
+        """Return a representation of an individual subscription.
+
+        This method creates and returns a ``Consumer`` object (that is, a
+        :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer`
+        subclass) bound to the topic. It does `not` create the subscription
+        on the backend (or do any API call at all); it simply returns an
+        object capable of doing these things.
+
+        If the ``callback`` argument is provided, then the :meth:`open` method
+        is automatically called on the returned object. If ``callback`` is
+        not provided, the subscription is returned unopened.
+
+        .. note::
+            It only makes sense to provide ``callback`` here if you have
+            already created the subscription manually in the API.
+
+        Args:
+            subscription (str): The name of the subscription. The
+                subscription should have already been created (for example,
+                by using :meth:`create_subscription`).
+            callback (function): The callback function. This function receives
+                the :class:`~.pubsub_v1.types.PubsubMessage` as its only
+                argument.
+            flow_control (~.pubsub_v1.types.FlowControl): The flow control
+                settings. Use this to prevent situations where you are
+                inundated with too many messages at once. 
+ + Returns: + ~.pubsub_v1.subscriber.consumer.base.BaseConsumer: An instance + of the defined ``consumer_class`` on the client. + """ + flow_control = types.FlowControl(*flow_control) + subscr = self._policy_class(self, subscription, flow_control) + if callable(callback): + subscr.open(callback) + return subscr diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/message.py b/pubsub/google/cloud/pubsub_v1/subscriber/message.py new file mode 100644 index 000000000000..1015149cfbbf --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -0,0 +1,198 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import math +import time + + +class Message(object): + """A representation of a single Pub/Sub message. + + The common way to interact with + :class:`~.pubsub_v1.subscriber.message.Message` objects is to receive + them in callbacks on subscriptions; most users should never have a need + to instantiate them by hand. (The exception to this is if you are + implementing a custom subclass to + :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.) + + Attributes: + message_id (str): The message ID. In general, you should not need + to use this directly. + data (bytes): The data in the message. Note that this will be a + :class:`bytes`, not a text string. + attributes (dict): The attributes sent along with the message. 
+ publish_time (datetime): The time that this message was originally + published. + """ + def __init__(self, message, ack_id, request_queue): + """Construct the Message. + + .. note:: + + This class should not be constructed directly; it is the + responsibility of :class:`BasePolicy` subclasses to do so. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The message received + from Pub/Sub. + ack_id (str): The ack_id received from Pub/Sub. + request_queue (queue.Queue): A queue provided by the policy that + can accept requests; the policy is responsible for handling + those requests. + """ + self._message = message + self._ack_id = ack_id + self._request_queue = request_queue + self.message_id = message.message_id + + # The instantiation time is the time that this message + # was received. Tracking this provides us a way to be smart about + # the default lease deadline. + self._received_timestamp = time.time() + + # The policy should lease this message, telling PubSub that it has + # it until it is acked or otherwise dropped. + self.lease() + + def __repr__(self): + # Get an abbreviated version of the data. + abbv_data = self._message.data + if len(abbv_data) > 50: + abbv_data = abbv_data[0:50] + b'...' + + # Return a useful representation. + answer = 'Message {\n' + answer += ' data: {0!r}\n'.format(abbv_data) + answer += ' attributes: {0!r}\n'.format(self.attributes) + answer += '}' + return answer + + @property + def attributes(self): + """Return the attributes of the underlying Pub/Sub Message. + + Returns: + dict: The message's attributes. + """ + return self._message.attributes + + @property + def data(self): + """Return the data for the underlying Pub/Sub Message. + + Returns: + bytes: The message data. This is always a bytestring; if you + want a text string, call :meth:`bytes.decode`. + """ + return self._message.data + + @property + def publish_time(self): + """Return the time that the message was originally published. 
+ + Returns: + datetime: The date and time that the message was published. + """ + return self._message.publish_time + + @property + def size(self): + """Return the size of the underlying message, in bytes.""" + return self._message.ByteSize() + + def ack(self): + """Acknowledge the given message. + + Acknowledging a message in Pub/Sub means that you are done + with it, and it will not be delivered to this subscription again. + You should avoid acknowledging messages until you have + *finished* processing them, so that in the event of a failure, + you receive the message again. + + .. warning:: + Acks in Pub/Sub are best effort. You should always + ensure that your processing code is idempotent, as you may + receive any given message more than once. + """ + time_to_ack = math.ceil(time.time() - self._received_timestamp) + self._request_queue.put(('ack', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + 'time_to_ack': time_to_ack, + })) + + def drop(self): + """Release the message from lease management. + + This informs the policy to no longer hold on to the lease for this + message. Pub/Sub will re-deliver the message if it is not acknowledged + before the existing lease expires. + + .. warning:: + For most use cases, the only reason to drop a message from + lease management is on :meth:`ack` or :meth:`nack`; these methods + both call this one. You probably do not want to call this method + directly. + """ + self._request_queue.put(('drop', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + })) + + def lease(self): + """Inform the policy to lease this message continually. + + .. note:: + This method is called by the constructor, and you should never + need to call it manually. + """ + self._request_queue.put(('lease', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + })) + + def modify_ack_deadline(self, seconds): + """Set the deadline for acknowledgement to the given value. 
+
+        The default implementation handles this for you; you should not need
+        to manually deal with setting ack deadlines. The exception case is
+        if you are implementing your own custom subclass of
+        :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.
+
+        .. note::
+            This is not an extension; it *sets* the deadline to the given
+            number of seconds from right now. It is even possible to use this
+            method to make a deadline shorter.
+
+        Args:
+            seconds (int): The number of seconds to set the lease deadline
+                to. This should be between 0 and 600. Due to network latency,
+                values below 10 are advised against.
+        """
+        self._request_queue.put(('modify_ack_deadline', {
+            'ack_id': self._ack_id,
+            'seconds': seconds,
+        }))
+
+    def nack(self):
+        """Decline to acknowledge the given message.
+
+        This will cause the message to be re-delivered to the subscription.
+        """
+        self._request_queue.put(('nack', {
+            'ack_id': self._ack_id,
+            'byte_size': self.size,
+        }))
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py
new file mode 100644
index 000000000000..85d047eb9439
--- /dev/null
+++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py
@@ -0,0 +1,392 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division + +import abc +import logging +import random +import time + +import six + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber import _histogram + +logger = logging.getLogger(__name__) + + +@six.add_metaclass(abc.ABCMeta) +class BasePolicy(object): + """Abstract class defining a subscription policy. + + Although the :class:`~.pubsub_v1.subscriber.policy.thread.Policy` class, + based on :class:`threading.Thread`, is fine for most cases, + advanced users may need to implement something based on a different + concurrency model. + + This class defines the interface for the policy implementation; + subclasses may be passed as the ``policy_class`` argument to + :class:`~.pubsub_v1.client.SubscriberClient`. + """ + def __init__(self, client, subscription, + flow_control=types.FlowControl(), histogram_data=None): + """Instantiate the policy. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. + histogram_data (dict): Optional: A structure to store the histogram + data for predicting appropriate ack times. If set, this should + be a dictionary-like object. + + .. note:: + Additionally, the histogram relies on the assumption + that the dictionary will properly sort keys provided + that all keys are positive integers. If you are sending + your own dictionary class, ensure this assumption holds + or you will get strange behavior. 
+ """ + self._client = client + self._subscription = subscription + self._consumer = _consumer.Consumer(self) + self._ack_deadline = 10 + self._last_histogram_size = 0 + self.flow_control = flow_control + self.histogram = _histogram.Histogram(data=histogram_data) + + # These are for internal flow control tracking. + # They should not need to be used by subclasses. + self._bytes = 0 + self._ack_on_resume = set() + self._paused = False + + @property + def ack_deadline(self): + """Return the appropriate ack deadline. + + This method is "sticky". It will only perform the computations to + check on the right ack deadline if the histogram has gained a + significant amount of new information. + + Returns: + int: The correct ack deadline. + """ + target = min([ + self._last_histogram_size * 2, + self._last_histogram_size + 100, + ]) + if len(self.histogram) > target: + self._ack_deadline = self.histogram.percentile(percent=99) + return self._ack_deadline + + @property + def managed_ack_ids(self): + """Return the ack IDs currently being managed by the policy. + + Returns: + set: The set of ack IDs being managed. + """ + if not hasattr(self, '_managed_ack_ids'): + self._managed_ack_ids = set() + return self._managed_ack_ids + + @property + def subscription(self): + """Return the subscription. + + Returns: + str: The subscription + """ + return self._subscription + + @property + def _load(self): + """Return the current load. + + The load is represented as a float, where 1.0 represents having + hit one of the flow control limits, and values between 0.0 and 1.0 + represent how close we are to them. (0.5 means we have exactly half + of what the flow control setting allows, for example.) + + There are (currently) two flow control settings; this property + computes how close the subscriber is to each of them, and returns + whichever value is higher. (It does not matter that we have lots of + running room on setting A if setting B is over.) + + Returns: + float: The load value. 
+ """ + return max([ + len(self.managed_ack_ids) / self.flow_control.max_messages, + self._bytes / self.flow_control.max_bytes, + ]) + + def ack(self, ack_id, time_to_ack=None, byte_size=None): + """Acknowledge the message corresponding to the given ack_id. + + Args: + ack_id (str): The ack ID. + time_to_ack (int): The time it took to ack the message, measured + from when it was received from the subscription. This is used + to improve the automatic ack timing. + byte_size (int): The size of the PubSub message, in bytes. + """ + # If we got timing information, add it to the histogram. + if time_to_ack is not None: + self.histogram.add(int(time_to_ack)) + + # Send the request to ack the message. + # However, if the consumer is inactive, then queue the ack_id here + # instead; it will be acked as part of the initial request when the + # consumer is started again. + if self._consumer.active: + request = types.StreamingPullRequest(ack_ids=[ack_id]) + self._consumer.send_request(request) + else: + self._ack_on_resume.add(ack_id) + + # Remove the message from lease management. + self.drop(ack_id=ack_id, byte_size=byte_size) + + def call_rpc(self, request_generator): + """Invoke the Pub/Sub streaming pull RPC. + + Args: + request_generator (Generator): A generator that yields requests, + and blocks if there are no outstanding requests (until such + time as there are). + """ + return self._client.api.streaming_pull(request_generator) + + def drop(self, ack_id, byte_size): + """Remove the given ack ID from lease management. + + Args: + ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. + """ + # Remove the ack ID from lease management, and decrement the + # byte counter. + if ack_id in self.managed_ack_ids: + self.managed_ack_ids.remove(ack_id) + self._bytes -= byte_size + self._bytes = min([self._bytes, 0]) + + # If we have been paused by flow control, check and see if we are + # back within our limits. 
+ # + # In order to not thrash too much, require us to have passed below + # the resume threshold (80% by default) of each flow control setting + # before restarting. + if self._paused and self._load < self.flow_control.resume_threshold: + self._paused = False + self.open(self._callback) + + def get_initial_request(self, ack_queue=False): + """Return the initial request. + + This defines the initial request that must always be sent to Pub/Sub + immediately upon opening the subscription. + + Args: + ack_queue (bool): Whether to include any acks that were sent + while the connection was paused. + + Returns: + ~.pubsub_v1.types.StreamingPullRequest: A request suitable + for being the first request on the stream (and not suitable + for any other purpose). + + .. note:: + If ``ack_queue`` is set to True, this includes the ack_ids, but + also clears the internal set. + + This means that calls to :meth:`get_initial_request` with + ``ack_queue`` set to True are not idempotent. + """ + # Any ack IDs that are under lease management and not being acked + # need to have their deadline extended immediately. + ack_ids = set() + lease_ids = self.managed_ack_ids + if ack_queue: + ack_ids = self._ack_on_resume + lease_ids = lease_ids.difference(ack_ids) + + # Put the request together. + request = types.StreamingPullRequest( + ack_ids=list(ack_ids), + modify_deadline_ack_ids=list(lease_ids), + modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), + stream_ack_deadline_seconds=self.histogram.percentile(99), + subscription=self.subscription, + ) + + # Clear the ack_ids set. + # Note: If `ack_queue` is False, this just ends up being a no-op, + # since the set is just an empty set. + ack_ids.clear() + + # Return the initial request. + return request + + def lease(self, ack_id, byte_size): + """Add the given ack ID to lease management. + + Args: + ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. 
+ """ + # Add the ack ID to the set of managed ack IDs, and increment + # the size counter. + if ack_id not in self.managed_ack_ids: + self.managed_ack_ids.add(ack_id) + self._bytes += byte_size + + # Sanity check: Do we have too many things in our inventory? + # If we do, we need to stop the stream. + if self._load >= 1.0: + self._paused = True + self.close() + + def maintain_leases(self): + """Maintain all of the leases being managed by the policy. + + This method modifies the ack deadline for all of the managed + ack IDs, then waits for most of that time (but with jitter), and + then calls itself. + + .. warning:: + This method blocks, and generally should be run in a separate + thread or process. + + Additionally, you should not have to call this method yourself, + unless you are implementing your own policy. If you are + implementing your own policy, you _should_ call this method + in an appropriate form of subprocess. + """ + while True: + # Sanity check: Should this infinitely loop quit? + if not self._consumer.active: + return + + # Determine the appropriate duration for the lease. This is + # based off of how long previous messages have taken to ack, with + # a sensible default and within the ranges allowed by Pub/Sub. + p99 = self.histogram.percentile(99) + logger.debug('The current p99 value is %d seconds.' % p99) + + # Create a streaming pull request. + # We do not actually call `modify_ack_deadline` over and over + # because it is more efficient to make a single request. + ack_ids = list(self.managed_ack_ids) + logger.debug('Renewing lease for %d ack IDs.' % len(ack_ids)) + if len(ack_ids) > 0 and self._consumer.active: + request = types.StreamingPullRequest( + modify_deadline_ack_ids=ack_ids, + modify_deadline_seconds=[p99] * len(ack_ids), + ) + self._consumer.send_request(request) + + # Now wait an appropriate period of time and do this again. 
+ # + # We determine the appropriate period of time based on a random + # period between 0 seconds and 90% of the lease. This use of + # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases + # where there are many clients. + snooze = random.uniform(0.0, p99 * 0.9) + logger.debug('Snoozing lease management for %f seconds.' % snooze) + time.sleep(snooze) + + def modify_ack_deadline(self, ack_id, seconds): + """Modify the ack deadline for the given ack_id. + + Args: + ack_id (str): The ack ID + seconds (int): The number of seconds to set the new deadline to. + """ + request = types.StreamingPullRequest( + modify_deadline_ack_ids=[ack_id], + modify_deadline_seconds=[seconds], + ) + self._consumer.send_request(request) + + def nack(self, ack_id, byte_size=None): + """Explicitly deny receipt of a message. + + Args: + ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. + """ + self.modify_ack_deadline(ack_id=ack_id, seconds=0) + self.drop(ack_id=ack_id, byte_size=byte_size) + + @abc.abstractmethod + def close(self): + """Close the existing connection.""" + raise NotImplementedError + + @abc.abstractmethod + def on_exception(self, exception): + """Called when a gRPC exception occurs. + + If this method does nothing, then the stream is re-started. If this + raises an exception, it will stop the consumer thread. + This is executed on the response consumer helper thread. + + Args: + exception (Exception): The exception raised by the RPC. + """ + raise NotImplementedError + + @abc.abstractmethod + def on_response(self, response): + """Process a response from gRPC. + + This gives the consumer control over how responses are scheduled to + be processed. This method is expected to not block and instead + schedule the response to be consumed by some sort of concurrency. 
+
+ For example, if the Policy implementation takes a callback in its
+ constructor, you can schedule the callback using a
+ :class:`concurrent.futures.ThreadPoolExecutor`::
+
+ self._pool.submit(self._callback, response)
+
+ This is called from the response consumer helper thread.
+
+ Args:
+ response (Any): The protobuf response from the RPC.
+ """
+ raise NotImplementedError
+
+ @abc.abstractmethod
+ def open(self, callback):
+ """Open a streaming pull connection and begin receiving messages.
+
+ For each message received, the ``callback`` function is fired with
+ a :class:`~.pubsub_v1.subscriber.message.Message` as its only
+ argument.
+
+ Args:
+ callback (Callable[Message]): A callable that receives a
+ Pub/Sub Message.
+ """
+ raise NotImplementedError
diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py
new file mode 100644
index 000000000000..df0f965748de
--- /dev/null
+++ b/pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py
@@ -0,0 +1,147 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License. 
+ +from __future__ import absolute_import + +from concurrent import futures +from queue import Queue +import logging +import threading + +import grpc + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber.policy import base +from google.cloud.pubsub_v1.subscriber.message import Message + + +logger = logging.getLogger(__name__) + + +class Policy(base.BasePolicy): + """A consumer class based on :class:`threading.Thread`. + + This consumer handles the connection to the Pub/Sub service and all of + the concurrency needs. + """ + def __init__(self, client, subscription, flow_control=types.FlowControl(), + executor=None, queue=None): + """Instantiate the policy. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow + control settings. + executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A + ThreadPoolExecutor instance, or anything duck-type compatible + with it. + queue (~queue.Queue): (Optional.) A Queue instance, appropriate + for crossing the concurrency boundary implemented by + ``executor``. + """ + # Default the callback to a no-op; it is provided by `.open`. + self._callback = lambda message: None + + # Create a queue for keeping track of shared state. + if queue is None: + queue = Queue() + self._request_queue = Queue() + + # Call the superclass constructor. + super(Policy, self).__init__( + client=client, + flow_control=flow_control, + subscription=subscription, + ) + + # Also maintain a request queue and an executor. 
+ logger.debug('Creating callback requests thread (not starting).') + if executor is None: + executor = futures.ThreadPoolExecutor(max_workers=10) + self._executor = executor + self._callback_requests = _helper_threads.QueueCallbackThread( + self._request_queue, + self.on_callback_request, + ) + + def close(self): + """Close the existing connection.""" + # Close the main subscription connection. + self._consumer.helper_threads.stop('callback requests worker') + self._consumer.stop_consuming() + + def open(self, callback): + """Open a streaming pull connection and begin receiving messages. + + For each message received, the ``callback`` function is fired with + a :class:`~.pubsub_v1.subscriber.message.Message` as its only + argument. + + Args: + callback (Callable): The callback function. + """ + # Start the thread to pass the requests. + logger.debug('Starting callback requests worker.') + self._callback = callback + self._consumer.helper_threads.start( + 'callback requests worker', + self._request_queue, + self._callback_requests, + ) + + # Actually start consuming messages. + self._consumer.start_consuming() + + # Spawn a helper thread that maintains all of the leases for + # this policy. + logger.debug('Spawning lease maintenance worker.') + self._leaser = threading.Thread(target=self.maintain_leases) + self._leaser.daemon = True + self._leaser.start() + + def on_callback_request(self, callback_request): + """Map the callback request to the appropriate GRPC request.""" + action, kwargs = callback_request[0], callback_request[1] + getattr(self, action)(**kwargs) + + def on_exception(self, exception): + """Bubble the exception. + + This will cause the stream to exit loudly. + """ + # If this is DEADLINE_EXCEEDED, then we want to retry. + # That entails just returning None. + deadline_exceeded = grpc.StatusCode.DEADLINE_EXCEEDED + if getattr(exception, 'code', lambda: None)() == deadline_exceeded: + return + + # Raise any other exception. 
+ raise exception + + def on_response(self, response): + """Process all received Pub/Sub messages. + + For each message, schedule a callback with the executor. + """ + for msg in response.received_messages: + logger.debug('New message received from Pub/Sub: %r', msg) + logger.debug(self._callback) + message = Message(msg.message, msg.ack_id, self._request_queue) + future = self._executor.submit(self._callback, message) + logger.debug('Result: %s' % future.result()) diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py new file mode 100644 index 000000000000..a9de4a88f7f8 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -0,0 +1,70 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import collections +import sys + +import psutil + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.gax.utils.messages import get_messages +from google.protobuf import timestamp_pb2 + + +# Define the default values for batching. +# +# This class is used when creating a publisher or subscriber client, and +# these settings can be altered to tweak Pub/Sub behavior. +# The defaults should be fine for most use cases. 
+BatchSettings = collections.namedtuple( + 'BatchSettings', + ['max_bytes', 'max_latency', 'max_messages'], +) +BatchSettings.__new__.__defaults__ = ( + 1024 * 1024 * 5, # max_bytes: 5 MB + 0.05, # max_latency: 0.05 seconds + 1000, # max_messages: 1,000 +) + +# Define the type class and default values for flow control settings. +# +# This class is used when creating a publisher or subscriber client, and +# these settings can be altered to tweak Pub/Sub behavior. +# The defaults should be fine for most use cases. +FlowControl = collections.namedtuple( + 'FlowControl', + ['max_bytes', 'max_messages', 'resume_threshold'], +) +FlowControl.__new__.__defaults__ = ( + psutil.virtual_memory().total * 0.2, # max_bytes: 20% of total RAM + float('inf'), # max_messages: no limit + 0.8, # resume_threshold: 80% +) + + +# Pub/Sub uses timestamps from the common protobuf package. +# Do not make users import from there. +Timestamp = timestamp_pb2.Timestamp + + +_names = ['BatchSettings', 'FlowControl', 'Timestamp'] +for name, message in get_messages(pubsub_pb2).items(): + message.__module__ = 'google.cloud.pubsub_v1.types' + setattr(sys.modules[__name__], name, message) + _names.append(name) + + +__all__ = tuple(sorted(_names)) diff --git a/pubsub/nox.py b/pubsub/nox.py index 4bcecafe66b4..c860e0741fe6 100644 --- a/pubsub/nox.py +++ b/pubsub/nox.py @@ -38,10 +38,10 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', - '--cov=google.cloud.pubsub', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', '--quiet', '--cov-append', '--cov-report=', + '--cov=google.cloud.pubsub', '--cov=google.cloud.pubsub_v1', + '--cov-config=.coveragerc', 'tests/unit', ) @@ -87,7 +87,8 @@ def lint(session): '--library-filesets', 'google', '--test-filesets', 'tests', # Temporarily allow this to fail. 
- success_codes=range(0, 100)) + success_codes=range(0, 100), + ) @nox.session diff --git a/pubsub/setup.py b/pubsub/setup.py index 71fee1dd7b8f..91bbeb8e2a8c 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-publisher@google.com', + 'author_email': 'googleapis-packages@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,9 +51,11 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.26.0, < 0.27dev', - 'grpcio >= 1.2.0, < 2.0dev', - 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', + 'google-gax >= 0.15.13, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', + 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', + 'grpcio >= 1.0.2, < 2.0dev', + 'psutil >= 5.2.2, < 6.0dev', ] setup( diff --git a/pubsub/tests/system.py b/pubsub/tests/system.py index eddfd1274da0..02666eae676a 100644 --- a/pubsub/tests/system.py +++ b/pubsub/tests/system.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. +# Copyright 2017, Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,395 +12,95 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import datetime -import os -import unittest +from __future__ import absolute_import -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from grpc import StatusCode -import requests +import time +import uuid -from google.cloud.environment_vars import PUBSUB_EMULATOR -from google.cloud.exceptions import Conflict -from google.cloud.pubsub import client +import mock +import six -from test_utils.retry import RetryInstanceState -from test_utils.retry import RetryResult -from test_utils.retry import RetryErrors -from test_utils.system import EmulatorCreds -from test_utils.system import unique_resource_id +from google import auth +from google.cloud import pubsub_v1 -def _unavailable(exc): - return exc_to_code(exc) == StatusCode.UNAVAILABLE +def _resource_name(resource_type): + """Return a randomly selected name for a resource. - -retry_unavailable = RetryErrors(GaxError, _unavailable) - - -class Config(object): - """Run-time configuration to be modified at set-up. - - This is a mutable stand-in to allow test set-up to modify - global state. - """ - CLIENT = None - IN_EMULATOR = False - - -def setUpModule(): - Config.IN_EMULATOR = os.getenv(PUBSUB_EMULATOR) is not None - if Config.IN_EMULATOR: - credentials = EmulatorCreds() - http = requests.Session() # Un-authorized. - Config.CLIENT = client.Client( - credentials=credentials, _http=http) - else: - Config.CLIENT = client.Client() - - -def _consume_topics(pubsub_client): - """Consume entire iterator. - - :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` - :param pubsub_client: Client to use to retrieve topics. - - :rtype: list - :returns: List of all topics encountered. - """ - return list(pubsub_client.list_topics()) - - -def _consume_snapshots(pubsub_client): - """Consume entire iterator. - - :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` - :param pubsub_client: Client to use to retrieve snapshots. 
- - :rtype: list - :returns: List of all snapshots encountered. - """ - return list(pubsub_client.list_snapshots()) - - -def _consume_subscriptions(topic): - """Consume entire iterator. - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: Topic to use to retrieve subscriptions. - - :rtype: list - :returns: List of all subscriptions encountered. + Args: + resource_type (str): The resource for which a name is being + generated. Should be singular (e.g. "topic", "subscription") """ - return list(topic.list_subscriptions()) - - -class TestPubsub(unittest.TestCase): - - def setUp(self): - self.to_delete = [] - - def tearDown(self): - for doomed in self.to_delete: - doomed.delete() - - def test_create_topic(self): - topic_name = 'a-new-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - self.assertTrue(topic.exists()) - self.assertEqual(topic.name, topic_name) - - with self.assertRaises(Conflict): - topic.create() - - def test_list_topics(self): - before = _consume_topics(Config.CLIENT) - topics_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), - ] - for topic_name in topics_to_create: - topic = Config.CLIENT.topic(topic_name) - topic.create() - self.to_delete.append(topic) - - # Retrieve the topics. 
- def _all_created(result): - return len(result) == len(before) + len(topics_to_create) - - retry = RetryResult(_all_created) - after = retry(_consume_topics)(Config.CLIENT) - - created = [topic for topic in after - if topic.name in topics_to_create and - topic.project == Config.CLIENT.project] - self.assertEqual(len(created), len(topics_to_create)) - - def test_create_subscription_defaults(self): - TOPIC_NAME = 'create-sub-def' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertIs(subscription.topic, topic) - - with self.assertRaises(Conflict): - subscription.create() - - def test_create_subscription_w_ack_deadline(self): - TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() - subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=120) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertEqual(subscription.ack_deadline, 120) - self.assertIs(subscription.topic, topic) - - def test_create_subscription_w_message_retention(self): - TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() - duration 
= datetime.timedelta(hours=12) - subscription = topic.subscription( - SUBSCRIPTION_NAME, retain_acked_messages=True, - message_retention_duration=duration) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertTrue(subscription.retain_acked_messages) - self.assertEqual(subscription.message_retention_duration, duration) - self.assertIs(subscription.topic, topic) - - def test_list_subscriptions(self): - TOPIC_NAME = 'list-sub' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - topic.create() - self.to_delete.append(topic) - empty = _consume_subscriptions(topic) - self.assertEqual(len(empty), 0) - subscriptions_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), - ] - for subscription_name in subscriptions_to_create: - subscription = topic.subscription(subscription_name) - subscription.create() - self.to_delete.append(subscription) - - # Retrieve the subscriptions. 
- def _all_created(result): - return len(result) == len(subscriptions_to_create) - - retry = RetryResult(_all_created) - all_subscriptions = retry(_consume_subscriptions)(topic) - - created = [subscription for subscription in all_subscriptions - if subscription.name in subscriptions_to_create] - self.assertEqual(len(created), len(subscriptions_to_create)) - - def test_message_pull_mode_e2e(self): - import operator - TOPIC_NAME = 'message-e2e' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME, - timestamp_messages=True) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - - MESSAGE_1 = b'MESSAGE ONE' - MESSAGE_2 = b'MESSAGE ONE' - EXTRA_1 = 'EXTRA 1' - EXTRA_2 = 'EXTRA 2' - topic.publish(MESSAGE_1, extra=EXTRA_1) - topic.publish(MESSAGE_2, extra=EXTRA_2) - - class Hoover(object): - - def __init__(self): - self.received = [] - - def done(self, *dummy): - return len(self.received) == 2 - - def suction(self): - with subscription.auto_ack(max_messages=2) as ack: - self.received.extend(ack.values()) - - hoover = Hoover() - retry = RetryInstanceState(hoover.done) - retry(hoover.suction)() - - message1, message2 = sorted(hoover.received, - key=operator.attrgetter('timestamp')) - - self.assertEqual(message1.data, MESSAGE_1) - self.assertEqual(message1.attributes['extra'], EXTRA_1) - self.assertIsNotNone(message1.service_timestamp) - - self.assertEqual(message2.data, MESSAGE_2) - self.assertEqual(message2.attributes['extra'], EXTRA_2) - self.assertIsNotNone(message2.service_timestamp) - - def _maybe_emulator_skip(self): - # NOTE: This method is necessary because ``Config.IN_EMULATOR`` - # is set at runtime rather than import time, which means we - # can't use the @unittest.skipIf decorator. 
- if Config.IN_EMULATOR: - self.skipTest('IAM not supported by Pub/Sub emulator') - - def test_topic_iam_policy(self): - from google.cloud.pubsub.iam import PUBSUB_TOPICS_GET_IAM_POLICY - self._maybe_emulator_skip() - topic_name = 'test-topic-iam-policy-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - topic.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(topic.exists)() - self.to_delete.append(topic) - - if topic.check_iam_permissions([PUBSUB_TOPICS_GET_IAM_POLICY]): - policy = topic.get_iam_policy() - viewers = set(policy.viewers) - viewers.add(policy.user('jjg@google.com')) - policy.viewers = viewers - new_policy = topic.set_iam_policy(policy) - self.assertEqual(new_policy.viewers, policy.viewers) - - def test_subscription_iam_policy(self): - from google.cloud.pubsub.iam import PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY - self._maybe_emulator_skip() - topic_name = 'test-sub-iam-policy-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - topic.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(topic.exists)() - self.to_delete.append(topic) - - SUB_NAME = 'test-sub-iam-policy-sub' + unique_resource_id('-') - subscription = topic.subscription(SUB_NAME) - subscription.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(subscription.exists)() - self.to_delete.insert(0, subscription) - - if subscription.check_iam_permissions( - [PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY]): - policy = subscription.get_iam_policy() - viewers = set(policy.viewers) - viewers.add(policy.user('jjg@google.com')) - policy.viewers = viewers - new_policy = subscription.set_iam_policy(policy) - self.assertEqual(new_policy.viewers, policy.viewers) - - def test_create_snapshot(self): - TOPIC_NAME = 'create-snap-def' + 
unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - before_snapshots = _consume_snapshots(Config.CLIENT) - - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=600) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') - snapshot = subscription.snapshot(SNAPSHOT_NAME) - snapshot.create() - self.to_delete.append(snapshot) - - # There is no GET method for snapshot, so check existence using - # list - def retry_predicate(result): - return len(result) > len(before_snapshots) - - retry = RetryResult(retry_predicate, max_tries=5) - after_snapshots = retry(_consume_snapshots)(Config.CLIENT) - self.assertEqual(len(before_snapshots) + 1, len(after_snapshots)) - - def full_name(obj): - return obj.full_name - - self.assertIn(snapshot.full_name, map(full_name, after_snapshots)) - self.assertNotIn(snapshot.full_name, map(full_name, before_snapshots)) - - with self.assertRaises(Conflict): - snapshot.create() - - def test_seek(self): - TOPIC_NAME = 'seek-e2e' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME, - timestamp_messages=True) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - - SUBSCRIPTION_NAME = 'subscribing-to-seek' + unique_resource_id('-') - subscription = topic.subscription( - SUBSCRIPTION_NAME, retain_acked_messages=True) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - - SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') - snapshot = subscription.snapshot(SNAPSHOT_NAME) - snapshot.create() - self.to_delete.append(snapshot) - - MESSAGE_1 = b'MESSAGE ONE' - topic.publish(MESSAGE_1) - MESSAGE_2 = b'MESSAGE TWO' - topic.publish(MESSAGE_2) - - ((ack_id_1a, recvd_1a), ) = 
subscription.pull() - ((ack_id_2a, recvd_2a), ) = subscription.pull() - before_data = [obj.data for obj in (recvd_1a, recvd_2a)] - self.assertIn(MESSAGE_1, before_data) - self.assertIn(MESSAGE_2, before_data) - subscription.acknowledge((ack_id_1a, ack_id_2a)) - - self.assertFalse(subscription.pull(return_immediately=True)) - - subscription.seek_snapshot(snapshot) - - ((_, recvd_1b), ) = subscription.pull() - ((_, recvd_2b), ) = subscription.pull() - after_data = [obj.data for obj in (recvd_1b, recvd_2b)] - self.assertEqual(sorted(before_data), sorted(after_data)) + return 'projects/{project}/{resource_type}s/st-n{random}'.format( + project=auth.default()[1], + random=str(uuid.uuid4())[0:8], + resource_type=resource_type, + ) + + +def test_publish_messages(): + publisher = pubsub_v1.PublisherClient() + topic_name = _resource_name('topic') + futures = [] + + try: + publisher.create_topic(topic_name) + for i in range(0, 500): + futures.append( + publisher.publish( + topic_name, + b'The hail in Wales falls mainly on the snails.', + num=str(i), + ), + ) + for future in futures: + result = future.result() + assert isinstance(result, (six.text_type, six.binary_type)) + finally: + publisher.delete_topic(topic_name) + + +def test_subscribe_to_messages(): + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_name = _resource_name('topic') + sub_name = _resource_name('subscription') + + try: + # Create a topic. + publisher.create_topic(topic_name) + + # Subscribe to the topic. This must happen before the messages + # are published. + subscriber.create_subscription(sub_name, topic_name) + subscription = subscriber.subscribe(sub_name) + + # Publish some messages. + futures = [publisher.publish( + topic_name, + b'Wooooo! The claaaaaw!', + num=str(i), + ) for i in range(0, 50)] + + # Make sure the publish completes. 
+ [f.result() for f in futures] + + # The callback should process the message numbers to prove + # that we got everything at least once. + callback = mock.Mock(wraps=lambda message: message.ack()) + + # Actually open the subscription and hold it open for a few seconds. + subscription.open(callback) + for second in range(0, 10): + time.sleep(1) + + # The callback should have fired at least fifty times, but it + # may take some time. + if callback.call_count >= 50: + return + + # Okay, we took too long; fail out. + assert callback.call_count >= 50 + finally: + publisher.delete_topic(topic_name) + subscriber.delete_subscription(sub_name) diff --git a/pubsub/tests/unit/__init__.py b/pubsub/tests/unit/__init__.py index 58e0d9153632..e69de29bb2d1 100644 --- a/pubsub/tests/unit/__init__.py +++ b/pubsub/tests/unit/__init__.py @@ -1,13 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py new file mode 100644 index 000000000000..05a749d58425 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -0,0 +1,69 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher.batch.thread import Batch + + +def create_batch(status=None, settings=types.BatchSettings()): + """Create a batch object, which does not commit. + + Args: + status (str): If provided, the batch's internal status will be set + to the provided status. + + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: The batch object + """ + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + batch = Batch(client, 'topic_name', settings, autocommit=False) + if status: + batch._status = status + return batch + + +def test_len(): + batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) + assert len(batch) == 0 + batch.publish(types.PubsubMessage(data=b'foo')) + assert len(batch) == 1 + + +def test_will_accept(): + batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) + message = types.PubsubMessage() + assert batch.will_accept(message) is True + + +def test_will_not_accept_status(): + batch = create_batch(status='talk to the hand') + message = types.PubsubMessage() + assert batch.will_accept(message) is False + + +def test_will_not_accept_size(): + batch = create_batch( + settings=types.BatchSettings(max_bytes=10), + status=BatchStatus.ACCEPTING_MESSAGES, + ) + message = types.PubsubMessage(data=b'abcdefghijklmnopqrstuvwxyz') + 
assert batch.will_accept(message) is False diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py new file mode 100644 index 000000000000..00b761f52b96 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -0,0 +1,204 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading +import time + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher.batch.thread import Batch + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return publisher.Client(credentials=creds) + + +def create_batch(autocommit=False, **batch_settings): + """Return a batch object suitable for testing. + + Args: + autocommit (bool): Whether the batch should commit after + ``max_latency`` seconds. By default, this is ``False`` + for unit testing. + kwargs (dict): Arguments passed on to the + :class:``~.pubsub_v1.types.BatchSettings`` constructor. + + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: A batch object. 
+ """ + client = create_client() + settings = types.BatchSettings(**batch_settings) + return Batch(client, 'topic_name', settings, autocommit=autocommit) + + +def test_init(): + """Establish that a monitor thread is usually created on init.""" + client = create_client() + + # Do not actually create a thread, but do verify that one was created; + # it should be running the batch's "monitor" method (which commits the + # batch once time elapses). + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch = Batch(client, 'topic_name', types.BatchSettings()) + Thread.assert_called_once_with(target=batch.monitor) + + # New batches start able to accept messages by default. + assert batch.status == BatchStatus.ACCEPTING_MESSAGES + + +def test_init_infinite_latency(): + batch = create_batch(max_latency=float('inf')) + assert batch._thread is None + + +def test_client(): + client = create_client() + settings = types.BatchSettings() + batch = Batch(client, 'topic_name', settings, autocommit=False) + assert batch.client is client + + +def test_commit(): + batch = create_batch() + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch.commit() + + # A thread should have been created to do the actual commit. + Thread.assert_called_once_with(target=batch._commit) + Thread.return_value.start.assert_called_once_with() + + # The batch's status needs to be something other than "accepting messages", + # since the commit started. + assert batch.status != BatchStatus.ACCEPTING_MESSAGES + + +def test_blocking_commit(): + batch = create_batch() + futures = ( + batch.publish({'data': b'This is my message.'}), + batch.publish({'data': b'This is another message.'}), + ) + + # Set up the underlying API publish method to return a PublishResponse. + with mock.patch.object(type(batch.client.api), 'publish') as publish: + publish.return_value = types.PublishResponse(message_ids=['a', 'b']) + + # Actually commit the batch. 
+ batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. + publish.assert_called_once_with('topic_name', [ + types.PubsubMessage(data=b'This is my message.'), + types.PubsubMessage(data=b'This is another message.'), + ]) + + # Establish that all of the futures are done, and that they have the + # expected values. + assert all([f.done() for f in futures]) + assert futures[0].result() == 'a' + assert futures[1].result() == 'b' + + +def test_blocking_commit_no_messages(): + batch = create_batch() + with mock.patch.object(type(batch.client.api), 'publish') as publish: + batch._commit() + assert publish.call_count == 0 + + +def test_blocking_commit_wrong_messageid_length(): + batch = create_batch() + futures = ( + batch.publish({'data': b'blah blah blah'}), + batch.publish({'data': b'blah blah blah blah'}), + ) + + # Set up a PublishResponse that only returns one message ID. + with mock.patch.object(type(batch.client.api), 'publish') as publish: + publish.return_value = types.PublishResponse(message_ids=['a']) + batch._commit() + for future in futures: + assert future.done() + assert isinstance(future.exception(), exceptions.PublishError) + + +def test_monitor(): + batch = create_batch(max_latency=5.0) + with mock.patch.object(time, 'sleep') as sleep: + with mock.patch.object(type(batch), '_commit') as _commit: + batch.monitor() + + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) + + # Since `monitor` runs in its own thread, it should call + # the blocking commit implementation. + _commit.assert_called_once_with() + + +def test_monitor_already_committed(): + batch = create_batch(max_latency=5.0) + batch._status = 'something else' + with mock.patch.object(time, 'sleep') as sleep: + batch.monitor() + + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) + + # The status should not have changed. 
+ assert batch._status == 'something else' + + +def test_publish(): + batch = create_batch() + messages = ( + types.PubsubMessage(data=b'foobarbaz'), + types.PubsubMessage(data=b'spameggs'), + types.PubsubMessage(data=b'1335020400'), + ) + + # Publish each of the messages, which should save them to the batch. + for message in messages: + batch.publish(message) + + # There should be three messages on the batch, and three futures. + assert len(batch.messages) == 3 + assert len(batch._futures) == 3 + + # The size should have been incremented by the sum of the size of the + # messages. + assert batch.size == sum([m.ByteSize() for m in messages]) + assert batch.size > 0 # I do not always trust protobuf. + + +def test_publish_dict(): + batch = create_batch() + batch.publish({'data': b'foobarbaz', 'attributes': {'spam': 'eggs'}}) + + # There should be one message on the batch. + assert len(batch.messages) == 1 + + # It should be an actual protobuf Message at this point, with the + # expected values. + message = batch.messages[0] + assert isinstance(message, types.PubsubMessage) + assert message.data == b'foobarbaz' + assert message.attributes == {'spam': 'eggs'} diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_futures.py b/pubsub/tests/unit/pubsub_v1/publisher/test_futures.py new file mode 100644 index 000000000000..e9b64a202e94 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_futures.py @@ -0,0 +1,118 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +import pytest + +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.futures import Future + + +def test_cancel(): + assert Future().cancel() is False + + +def test_cancelled(): + assert Future().cancelled() is False + + +def test_running(): + assert Future().running() is True + + +def test_done(): + future = Future() + assert future.done() is False + future.set_result('12345') + assert future.done() is True + + +def test_exception_no_error(): + future = Future() + future.set_result('12345') + assert future.exception() is None + + +def test_exception_with_error(): + future = Future() + error = RuntimeError('Something really bad happened.') + future.set_exception(error) + + # Make sure that the exception that is returned is the batch's error. + # Also check the type to ensure the batch's error did not somehow + # change internally. 
+ assert future.exception() is error + assert isinstance(future.exception(), RuntimeError) + with pytest.raises(RuntimeError): + future.result() + + +def test_exception_timeout(): + future = Future() + with pytest.raises(exceptions.TimeoutError): + future.exception(timeout=0.01) + + +def test_result_no_error(): + future = Future() + future.set_result('42') + assert future.result() == '42' + + +def test_result_with_error(): + future = Future() + future.set_exception(RuntimeError('Something really bad happened.')) + with pytest.raises(RuntimeError): + future.result() + + +def test_add_done_callback_pending_batch(): + future = Future() + callback = mock.Mock() + future.add_done_callback(callback) + assert len(future._callbacks) == 1 + assert callback in future._callbacks + assert callback.call_count == 0 + + +def test_add_done_callback_completed_batch(): + future = Future() + future.set_result('12345') + callback = mock.Mock(spec=()) + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +def test_trigger(): + future = Future() + callback = mock.Mock(spec=()) + future.add_done_callback(callback) + assert callback.call_count == 0 + future.set_result('12345') + callback.assert_called_once_with(future) + + +def test_set_result_once_only(): + future = Future() + future.set_result('12345') + with pytest.raises(RuntimeError): + future.set_result('67890') + + +def test_set_exception_once_only(): + future = Future() + future.set_exception(ValueError('wah wah')) + with pytest.raises(RuntimeError): + future.set_exception(TypeError('other wah wah')) diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py new file mode 100644 index 000000000000..0054b25262b5 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -0,0 +1,143 @@ +# Copyright 2017, Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.gapic.pubsub.v1 import publisher_client +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return publisher.Client(credentials=creds) + + +def test_init(): + client = create_client() + + # A plain client should have an `api` (the underlying GAPIC) and a + # batch settings object, which should have the defaults. + assert isinstance(client.api, publisher_client.PublisherClient) + assert client.batch_settings.max_bytes == 5 * (2 ** 20) + assert client.batch_settings.max_latency == 0.05 + assert client.batch_settings.max_messages == 1000 + + +def test_batch_accepting(): + """Establish that an existing batch is returned if it accepts messages.""" + client = create_client() + message = types.PubsubMessage(data=b'foo') + + # At first, there are no batches, so this should return a new batch + # which is also saved to the object. + ante = len(client._batches) + batch = client.batch('topic_name', message, autocommit=False) + assert len(client._batches) == ante + 1 + assert batch is client._batches['topic_name'] + + # A subsequent request should return the same batch. 
+ batch2 = client.batch('topic_name', message, autocommit=False) + assert batch is batch2 + assert batch2 is client._batches['topic_name'] + + +def test_batch_without_autocreate(): + client = create_client() + message = types.PubsubMessage(data=b'foo') + + # If `create=False` is sent, then when the batch is not found, None + # is returned instead. + ante = len(client._batches) + batch = client.batch('topic_name', message, create=False) + assert batch is None + assert len(client._batches) == ante + + +def test_publish(): + client = create_client() + + # Use a mock in lieu of the actual batch class; set the mock up to claim + # indiscriminately that it accepts all messages. + batch = mock.Mock(spec=client._batch_class) + batch.will_accept.return_value = True + client._batches['topic_name'] = batch + + # Begin publishing. + client.publish('topic_name', b'spam') + client.publish('topic_name', b'foo', bar='baz') + + # The batch's publish method should have been called twice. + assert batch.publish.call_count == 2 + + # In both cases + # The first call should correspond to the first message. + _, args, _ = batch.publish.mock_calls[0] + assert args[0].data == b'spam' + assert not args[0].attributes + + # The second call should correspond to the second message. + _, args, _ = batch.publish.mock_calls[1] + assert args[0].data == b'foo' + assert args[0].attributes == {u'bar': u'baz'} + + +def test_publish_data_not_bytestring_error(): + client = create_client() + with pytest.raises(TypeError): + client.publish('topic_name', u'This is a text string.') + with pytest.raises(TypeError): + client.publish('topic_name', 42) + + +def test_publish_attrs_bytestring(): + client = create_client() + + # Use a mock in lieu of the actual batch class; set the mock up to claim + # indiscriminately that it accepts all messages. + batch = mock.Mock(spec=client._batch_class) + batch.will_accept.return_value = True + client._batches['topic_name'] = batch + + # Begin publishing. 
+ client.publish('topic_name', b'foo', bar=b'baz') + + # The attributes should have been sent as text. + _, args, _ = batch.publish.mock_calls[0] + assert args[0].data == b'foo' + assert args[0].attributes == {u'bar': u'baz'} + + +def test_publish_attrs_type_error(): + client = create_client() + with pytest.raises(TypeError): + client.publish('topic_name', b'foo', answer=42) + + +def test_gapic_instance_method(): + client = create_client() + with mock.patch.object(client.api, '_create_topic', autospec=True) as ct: + client.create_topic('projects/foo/topics/bar') + assert ct.call_count == 1 + _, args, _ = ct.mock_calls[0] + assert args[0] == types.Topic(name='projects/foo/topics/bar') + + +def test_gapic_class_method(): + client = create_client() + answer = client.topic_path('foo', 'bar') + assert answer == 'projects/foo/topics/bar' diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py new file mode 100644 index 000000000000..2a3429fbc5b3 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -0,0 +1,117 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import queue + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_consumer(): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + subscription = client.subscribe('sub_name_e') + return _consumer.Consumer(policy=subscription) + + +def test_send_request(): + consumer = create_consumer() + request = types.StreamingPullRequest(subscription='foo') + with mock.patch.object(queue.Queue, 'put') as put: + consumer.send_request(request) + put.assert_called_once_with(request) + + +def test_request_generator_thread(): + consumer = create_consumer() + generator = consumer._request_generator_thread() + + # The first request that comes from the request generator thread + # should always be the initial request. + initial_request = next(generator) + assert initial_request.subscription == 'sub_name_e' + assert initial_request.stream_ack_deadline_seconds == 10 + + # Subsequent requests correspond to items placed in the request queue. + consumer.send_request(types.StreamingPullRequest(ack_ids=['i'])) + request = next(generator) + assert request.ack_ids == ['i'] + + # The poison pill should stop the loop. + consumer.send_request(_helper_threads.STOP) + with pytest.raises(StopIteration): + next(generator) + + +def test_blocking_consume(): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we run out of them. 
+ with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: + consumer._blocking_consume() + assert on_res.call_count == 2 + assert on_res.mock_calls[0][1][1] == mock.sentinel.A + assert on_res.mock_calls[1][1][1] == mock.sentinel.B + + +def test_blocking_consume_keyboard_interrupt(): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we are sent the exiting event. + with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: + on_res.side_effect = KeyboardInterrupt + consumer._blocking_consume() + on_res.assert_called_once_with(consumer._policy, mock.sentinel.A) + + +@mock.patch.object(thread.Policy, 'call_rpc', autospec=True) +@mock.patch.object(thread.Policy, 'on_response', autospec=True) +@mock.patch.object(thread.Policy, 'on_exception', autospec=True) +def test_blocking_consume_exception_reraise(on_exc, on_res, call_rpc): + consumer = create_consumer() + + # Establish that we get responses until we are sent the exiting event. 
+ call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + on_res.side_effect = TypeError('Bad things!') + on_exc.side_effect = on_res.side_effect + with pytest.raises(TypeError): + consumer._blocking_consume() + + +def test_start_consuming(): + consumer = create_consumer() + helper_threads = consumer.helper_threads + with mock.patch.object(helper_threads, 'start', autospec=True) as start: + consumer.start_consuming() + assert consumer._exiting.is_set() is False + assert consumer.active is True + start.assert_called_once_with( + 'consume bidirectional stream', + consumer._request_queue, + consumer._blocking_consume, + ) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py new file mode 100644 index 000000000000..84775f0be2c1 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -0,0 +1,125 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import queue +import threading + +import mock + +from google.cloud.pubsub_v1.subscriber import _helper_threads + + +def test_start(): + registry = _helper_threads.HelperThreadRegistry() + queue_ = queue.Queue() + target = mock.Mock(spec=()) + with mock.patch.object(threading.Thread, 'start', autospec=True) as start: + registry.start('foo', queue_, target) + assert start.called + + +def test_stop_noop(): + registry = _helper_threads.HelperThreadRegistry() + assert len(registry._helper_threads) == 0 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + +def test_stop_dead_thread(): + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=None, + thread=threading.Thread(target=lambda: None), + ) + assert len(registry._helper_threads) == 1 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + +@mock.patch.object(queue.Queue, 'put') +@mock.patch.object(threading.Thread, 'is_alive') +@mock.patch.object(threading.Thread, 'join') +def test_stop_alive_thread(join, is_alive, put): + is_alive.return_value = True + + # Set up a registry with a helper thread in it. + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=queue.Queue(), + thread=threading.Thread(target=lambda: None), + ) + + # Assert that the helper thread is present, and removed correctly + # on stop. + assert len(registry._helper_threads) == 1 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + # Assert that all of our mocks were called in the expected manner. 
+ is_alive.assert_called_once_with() + join.assert_called_once_with() + put.assert_called_once_with(_helper_threads.STOP) + + +def test_stop_all(): + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=None, + thread=threading.Thread(target=lambda: None), + ) + assert len(registry._helper_threads) == 1 + registry.stop_all() + assert len(registry._helper_threads) == 0 + + +def test_stop_all_noop(): + registry = _helper_threads.HelperThreadRegistry() + assert len(registry._helper_threads) == 0 + registry.stop_all() + assert len(registry._helper_threads) == 0 + + +def test_queue_callback_thread(): + queue_ = queue.Queue() + callback = mock.Mock(spec=()) + qct = _helper_threads.QueueCallbackThread(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = (mock.sentinel.A, _helper_threads.STOP) + qct() + + # Assert that we got the expected calls. + assert get.call_count == 2 + callback.assert_called_once_with(mock.sentinel.A) + + +def test_queue_callback_thread_exception(): + queue_ = queue.Queue() + callback = mock.Mock(spec=(), side_effect=(Exception,)) + qct = _helper_threads.QueueCallbackThread(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = (mock.sentinel.A, _helper_threads.STOP) + qct() + + # Assert that we got the expected calls. + assert get.call_count == 2 + callback.assert_called_once_with(mock.sentinel.A) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py new file mode 100644 index 000000000000..23474a19d116 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py @@ -0,0 +1,84 @@ +# Copyright 2017, Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.pubsub_v1.subscriber import _histogram + + +def test_init(): + data = {} + histo = _histogram.Histogram(data=data) + assert histo._data is data + assert len(histo) == 0 + + +def test_contains(): + histo = _histogram.Histogram() + histo.add(10) + histo.add(20) + assert 10 in histo + assert 20 in histo + assert 30 not in histo + + +def test_max(): + histo = _histogram.Histogram() + assert histo.max == 600 + histo.add(120) + assert histo.max == 120 + histo.add(150) + assert histo.max == 150 + histo.add(20) + assert histo.max == 150 + + +def test_min(): + histo = _histogram.Histogram() + assert histo.min == 10 + histo.add(60) + assert histo.min == 60 + histo.add(30) + assert histo.min == 30 + histo.add(120) + assert histo.min == 30 + + +def test_add(): + histo = _histogram.Histogram() + histo.add(60) + assert histo._data[60] == 1 + histo.add(60) + assert histo._data[60] == 2 + + +def test_add_lower_limit(): + histo = _histogram.Histogram() + histo.add(5) + assert 5 not in histo + assert 10 in histo + + +def test_add_upper_limit(): + histo = _histogram.Histogram() + histo.add(12000) + assert 12000 not in histo + assert 600 in histo + + +def test_percentile(): + histo = _histogram.Histogram() + [histo.add(i) for i in range(101, 201)] + assert histo.percentile(100) == 200 + assert histo.percentile(101) == 200 + assert histo.percentile(99) == 199 + assert histo.percentile(1) == 101 diff 
--git a/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py new file mode 100644 index 000000000000..a3a1e16f027e --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -0,0 +1,102 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import queue +import time + +import mock + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import message + + +def create_message(data, ack_id='ACKID', **attrs): + with mock.patch.object(message.Message, 'lease') as lease: + with mock.patch.object(time, 'time') as time_: + time_.return_value = 1335020400 + msg = message.Message(types.PubsubMessage( + attributes=attrs, + data=data, + message_id='message_id', + publish_time=types.Timestamp(seconds=1335020400 - 86400), + ), ack_id, queue.Queue()) + lease.assert_called_once_with() + return msg + + +def test_attributes(): + msg = create_message(b'foo', baz='bacon', spam='eggs') + assert msg.attributes == {'baz': 'bacon', 'spam': 'eggs'} + + +def test_data(): + msg = create_message(b'foo') + assert msg.data == b'foo' + + +def test_publish_time(): + msg = create_message(b'foo') + assert msg.publish_time == types.Timestamp(seconds=1335020400 - 86400) + + +def test_ack(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + with mock.patch.object(message.Message, 
'drop') as drop: + msg.ack() + put.assert_called_once_with(('ack', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + 'time_to_ack': mock.ANY, + })) + + +def test_drop(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.drop() + put.assert_called_once_with(('drop', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + })) + + +def test_lease(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.lease() + put.assert_called_once_with(('lease', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + })) + + +def test_modify_ack_deadline(): + msg = create_message(b'foo', ack_id='bogus_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.modify_ack_deadline(60) + put.assert_called_once_with(('modify_ack_deadline', { + 'ack_id': 'bogus_id', + 'seconds': 60, + })) + + +def test_nack(): + msg = create_message(b'foo', ack_id='bogus_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.nack() + put.assert_called_once_with(('nack', { + 'ack_id': 'bogus_id', + 'byte_size': 25, + })) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py new file mode 100644 index 000000000000..df963424ccb9 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -0,0 +1,231 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import time + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(flow_control=types.FlowControl()): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + return thread.Policy(client, 'sub_name_d', flow_control=flow_control) + + +def test_ack_deadline(): + policy = create_policy() + assert policy.ack_deadline == 10 + policy.histogram.add(20) + assert policy.ack_deadline == 20 + policy.histogram.add(10) + assert policy.ack_deadline == 20 + + +def test_get_initial_request(): + policy = create_policy() + initial_request = policy.get_initial_request() + assert isinstance(initial_request, types.StreamingPullRequest) + assert initial_request.subscription == 'sub_name_d' + assert initial_request.stream_ack_deadline_seconds == 10 + + +def test_managed_ack_ids(): + policy = create_policy() + + # Ensure we always get a set back, even if the property is not yet set. + managed_ack_ids = policy.managed_ack_ids + assert isinstance(managed_ack_ids, set) + + # Ensure that multiple calls give the same actual object back. 
+ assert managed_ack_ids is policy.managed_ack_ids + + +def test_subscription(): + policy = create_policy() + assert policy.subscription == 'sub_name_d' + + +def test_ack(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.ack('ack_id_string', 20) + send_request.assert_called_once_with(types.StreamingPullRequest( + ack_ids=['ack_id_string'], + )) + assert len(policy.histogram) == 1 + assert 20 in policy.histogram + + +def test_ack_no_time(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.ack('ack_id_string') + send_request.assert_called_once_with(types.StreamingPullRequest( + ack_ids=['ack_id_string'], + )) + assert len(policy.histogram) == 0 + + +def test_ack_paused(): + policy = create_policy() + policy._paused = True + policy._consumer.active = False + with mock.patch.object(policy, 'open') as open_: + policy.ack('ack_id_string') + open_.assert_called() + assert 'ack_id_string' in policy._ack_on_resume + + +def test_call_rpc(): + policy = create_policy() + with mock.patch.object(policy._client.api, 'streaming_pull') as pull: + policy.call_rpc(mock.sentinel.GENERATOR) + pull.assert_called_once_with(mock.sentinel.GENERATOR) + + +def test_drop(): + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy._bytes = 20 + policy.drop('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 + + # Do this again to establish idempotency. + policy.drop('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 + + +def test_drop_below_threshold(): + """Establish that we resume a paused subscription. + + If the subscription is paused, and we drop sufficiently below + the flow control thresholds, it should resume. 
+ """ + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy._bytes = 20 + policy._paused = True + with mock.patch.object(policy, 'open') as open_: + policy.drop(ack_id='ack_id_string', byte_size=20) + open_.assert_called_once_with(policy._callback) + assert policy._paused is False + + +def test_load(): + flow_control = types.FlowControl(max_messages=10, max_bytes=1000) + policy = create_policy(flow_control=flow_control) + + # This should mean that our messages count is at 10%, and our bytes + # are at 15%; the ._load property should return the higher (0.15). + policy.lease(ack_id='one', byte_size=150) + assert policy._load == 0.15 + + # After this message is added, the messages should be higher at 20% + # (versus 16% for bytes). + policy.lease(ack_id='two', byte_size=10) + assert policy._load == 0.2 + + # Returning a number above 100% is fine. + policy.lease(ack_id='three', byte_size=1000) + assert policy._load == 1.16 + + +def test_modify_ack_deadline(): + policy = create_policy() + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.modify_ack_deadline('ack_id_string', 60) + send_request.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['ack_id_string'], + modify_deadline_seconds=[60], + )) + + +def test_maintain_leases_inactive_consumer(): + policy = create_policy() + policy._consumer.active = False + assert policy.maintain_leases() is None + + +def test_maintain_leases_ack_ids(): + policy = create_policy() + policy._consumer.active = True + policy.lease('my ack id', 50) + + # Mock the sleep object. + with mock.patch.object(time, 'sleep', autospec=True) as sleep: + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer.active = False + sleep.side_effect = trigger_inactive + + # Also mock the consumer, which sends the request. 
+ with mock.patch.object(policy._consumer, 'send_request') as send: + policy.maintain_leases() + send.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['my ack id'], + modify_deadline_seconds=[10], + )) + sleep.assert_called() + + +def test_maintain_leases_no_ack_ids(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(time, 'sleep', autospec=True) as sleep: + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer.active = False + sleep.side_effect = trigger_inactive + policy.maintain_leases() + sleep.assert_called() + + +def test_lease(): + policy = create_policy() + policy.lease(ack_id='ack_id_string', byte_size=20) + assert len(policy.managed_ack_ids) == 1 + assert policy._bytes == 20 + + # Do this again to prove idempotency. + policy.lease(ack_id='ack_id_string', byte_size=20) + assert len(policy.managed_ack_ids) == 1 + assert policy._bytes == 20 + + +def test_lease_above_threshold(): + flow_control = types.FlowControl(max_messages=2) + policy = create_policy(flow_control=flow_control) + with mock.patch.object(policy, 'close') as close: + policy.lease(ack_id='first_ack_id', byte_size=20) + assert close.call_count == 0 + policy.lease(ack_id='second_ack_id', byte_size=25) + close.assert_called_once_with() + + +def test_nack(): + policy = create_policy() + with mock.patch.object(policy, 'modify_ack_deadline') as mad: + with mock.patch.object(policy, 'drop') as drop: + policy.nack(ack_id='ack_id_string', byte_size=10) + drop.assert_called_once_with(ack_id='ack_id_string', byte_size=10) + mad.assert_called_once_with(ack_id='ack_id_string', seconds=0) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py new file mode 100644 index 000000000000..76aec184815e --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -0,0 +1,120 @@ +# Copyright 2017, Google Inc. 
All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from concurrent import futures +import queue +import threading + +import grpc + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(**kwargs): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + return thread.Policy(client, 'sub_name_c', **kwargs) + + +def test_init(): + policy = create_policy() + policy._callback(None) + + +def test_init_with_executor(): + executor = futures.ThreadPoolExecutor(max_workers=25) + policy = create_policy(executor=executor, queue=queue.Queue()) + assert policy._executor is executor + + +def test_close(): + policy = create_policy() + consumer = policy._consumer + with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: + policy.close() + stop_consuming.assert_called_once_with() + assert 'callback request worker' not in policy._consumer.helper_threads + + +@mock.patch.object(_helper_threads.HelperThreadRegistry, 'start') +@mock.patch.object(threading.Thread, 'start') +def test_open(thread_start, htr_start): + policy = create_policy() + with 
mock.patch.object(policy._consumer, 'start_consuming') as consuming: + policy.open(mock.sentinel.CALLBACK) + assert policy._callback is mock.sentinel.CALLBACK + consuming.assert_called_once_with() + htr_start.assert_called() + thread_start.assert_called() + + +def test_on_callback_request(): + policy = create_policy() + with mock.patch.object(policy, 'call_rpc') as call_rpc: + policy.on_callback_request(('call_rpc', {'something': 42})) + call_rpc.assert_called_once_with(something=42) + + +def test_on_exception_deadline_exceeded(): + policy = create_policy() + exc = mock.Mock(spec=('code',)) + exc.code.return_value = grpc.StatusCode.DEADLINE_EXCEEDED + assert policy.on_exception(exc) is None + + +def test_on_exception_other(): + policy = create_policy() + exc = TypeError('wahhhhhh') + with pytest.raises(TypeError): + policy.on_exception(exc) + + +def test_on_response(): + callback = mock.Mock(spec=()) + + # Set up the policy. + policy = create_policy() + policy._callback = callback + + # Set up the messages to send. + messages = ( + types.PubsubMessage(data=b'foo', message_id='1'), + types.PubsubMessage(data=b'bar', message_id='2'), + ) + + # Set up a valid response. + response = types.StreamingPullResponse( + received_messages=[ + {'ack_id': 'fack', 'message': messages[0]}, + {'ack_id': 'back', 'message': messages[1]}, + ], + ) + + # Actually run the method and prove that the callback was + # called in the expected way. + policy.on_response(response) + assert callback.call_count == 2 + for call in callback.mock_calls: + assert isinstance(call[1][0], message.Message) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py new file mode 100644 index 000000000000..50e90fead181 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -0,0 +1,44 @@ +# Copyright 2017, Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return subscriber.Client(credentials=creds) + + +def test_init(): + client = create_client() + assert client._policy_class is thread.Policy + + +def test_subscribe(): + client = create_client() + subscription = client.subscribe('sub_name_a') + assert isinstance(subscription, thread.Policy) + + +def test_subscribe_with_callback(): + client = create_client() + callback = mock.Mock() + with mock.patch.object(thread.Policy, 'open') as open_: + subscription = client.subscribe('sub_name_b', callback) + open_.assert_called_once_with(callback) + assert isinstance(subscription, thread.Policy) diff --git a/pubsub/tests/unit/test__gax.py b/pubsub/tests/unit/test__gax.py deleted file mode 100644 index dd2ea8077f84..000000000000 --- a/pubsub/tests/unit/test__gax.py +++ /dev/null @@ -1,1661 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - -try: - # pylint: disable=unused-import - import google.cloud.pubsub._gax - # pylint: enable=unused-import -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False -else: - _HAVE_GRPC = True - -from google.cloud._testing import _GAXBaseAPI - - -def _make_credentials(): - # pylint: disable=redefined-outer-name - import google.auth.credentials - # pylint: enable=redefined-outer-name - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(object): - PROJECT = 'PROJECT' - PROJECT_PATH = 'projects/%s' % (PROJECT,) - LIST_TOPICS_PATH = '%s/topics' % (PROJECT_PATH,) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) - SUB_NAME = 'sub_name' - SUB_PATH = '%s/subscriptions/%s' % (TOPIC_PATH, SUB_NAME) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = '%s/snapshots/%s' % (PROJECT_PATH, SNAPSHOT_NAME) - TIME = 12345 - - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_PublisherAPI(_Base, unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._gax import _PublisherAPI - - return _PublisherAPI - - def test_ctor(self): - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_topics_no_paging(self): - from google.gax import 
INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.topic import Topic - - TOKEN = 'TOKEN' - response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], - page_token=TOKEN) - gax_api = _GAXPublisherAPI(_list_topics_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, TOKEN) - - name, page_size, options = gax_api._list_topics_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_list_topics_with_paging(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - response = _GAXPageIterator( - [_TopicPB(self.TOPIC_PATH)], page_token=NEW_TOKEN) - gax_api = _GAXPublisherAPI(_list_topics_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - iterator = api.list_topics( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, NEW_TOKEN) - - name, page_size, options = gax_api._list_topics_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_topic_create(self): - topic_pb = _TopicPB(self.TOPIC_PATH) - gax_api = 
_GAXPublisherAPI(_create_topic_response=topic_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_create(self.TOPIC_PATH) - - self.assertEqual(resource, {'name': self.TOPIC_PATH}) - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_failed_precondition(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXPublisherAPI(_create_topic_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_already_exists(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXPublisherAPI(_create_topic_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_hit(self): - topic_pb = _TopicPB(self.TOPIC_PATH) - gax_api = _GAXPublisherAPI(_get_topic_response=topic_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_get(self.TOPIC_PATH) - - self.assertEqual(resource, {'name': self.TOPIC_PATH}) - 
topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_get(self.TOPIC_PATH) - - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_get(self.TOPIC_PATH) - - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_hit(self): - gax_api = _GAXPublisherAPI(_delete_topic_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXPublisherAPI(_delete_topic_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_delete(self.TOPIC_PATH) - - topic_path, options 
= gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_publish_hit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MSGID = 'DEADBEEF' - MESSAGE = {'data': B64, 'attributes': {}} - response = _PublishResponsePB([MSGID]) - gax_api = _GAXPublisherAPI(_publish_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(resource, [MSGID]) - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data.decode('ascii'), B64) - self.assertEqual(message_pb.attributes, {}) - self.assertEqual(options.is_bundling, False) - - def test_topic_publish_miss_w_attrs_w_bytes_payload(self): - import base64 - from google.cloud.exceptions import NotFound - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD) - MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} - timeout = 120 # 120 seconds or 2 minutes - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE], timeout=timeout) - - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data, B64) - self.assertEqual(message_pb.attributes, {'foo': 'bar'}) - self.assertEqual(options.is_bundling, False) - self.assertEqual(options.timeout, timeout) - - def test_topic_publish_error(self): - import base64 - from google.gax.errors import GaxError - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': B64, 'attributes': {}} - gax_api = 
_GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data.decode('ascii'), B64) - self.assertEqual(message_pb.attributes, {}) - self.assertEqual(options.is_bundling, False) - - def test_topic_list_subscriptions_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - response = _GAXPageIterator([local_sub_path]) - gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_topic_list_subscriptions_with_paging(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - 
local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - response = _GAXPageIterator( - [local_sub_path], page_token=NEW_TOKEN) - gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions( - topic, page_size=SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(next_token, NEW_TOKEN) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - name, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(name, self.TOPIC_PATH) - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_topic_list_subscriptions_miss(self): - from google.gax import INITIAL_PAGE - from google.cloud.exceptions import NotFound - from google.cloud.pubsub.topic import Topic - - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - topic = Topic(self.TOPIC_NAME, client) - api.topic_list_subscriptions(topic) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_topic_list_subscriptions_error(self): - from google.gax import INITIAL_PAGE - from google.gax.errors import GaxError - from google.cloud.pubsub.topic import Topic - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - topic = 
Topic(self.TOPIC_NAME, client) - api.topic_list_subscriptions(topic) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_SubscriberAPI(_Base, unittest.TestCase): - - PUSH_ENDPOINT = 'https://api.example.com/push' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._gax import _SubscriberAPI - - return _SubscriberAPI - - def test_ctor(self): - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_subscriptions_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Subscription as SubscriptionPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - response = _GAXPageIterator([sub_pb]) - gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) - - name, page_size, options = gax_api._list_subscriptions_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_list_subscriptions_with_paging(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Subscription as SubscriptionPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) - gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - client = _Client(self.PROJECT) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_subscriptions( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, NEW_TOKEN) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) - - name, page_size, options = gax_api._list_subscriptions_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 23) - self.assertEqual(options.page_token, TOKEN) - - def test_subscription_create(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertIsNone(push_config) - self.assertEqual(ack_deadline, None) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_optional_params(self): - import datetime - - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - 
expected_ack_deadline = 1729 - expected_push_endpoint = 'push-endpoint' - expected_retain_acked_messages = True - expected_message_retention_duration = datetime.timedelta( - days=1, hours=7, minutes=2, seconds=9) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, ack_deadline=expected_ack_deadline, - push_endpoint=expected_push_endpoint, - retain_acked_messages=expected_retain_acked_messages, - message_retention_duration=expected_message_retention_duration) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - print(gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, expected_push_endpoint) - self.assertEqual(ack_deadline, expected_ack_deadline) - self.assertEqual(retain_acked_messages, expected_retain_acked_messages) - self.assertEqual(message_retention_duration.seconds, - expected_message_retention_duration.total_seconds()) - self.assertIsNone(options) - - def test_subscription_create_failed_precondition(self): - from google.cloud.exceptions import Conflict - - DEADLINE = 600 - gax_api = _GAXSubscriberAPI( - _create_subscription_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) - self.assertEqual(ack_deadline, DEADLINE) - 
self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_already_exists(self): - from google.cloud.exceptions import Conflict - - DEADLINE = 600 - gax_api = _GAXSubscriberAPI(_create_subscription_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) - self.assertEqual(ack_deadline, DEADLINE) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertIsNone(push_config) - self.assertEqual(ack_deadline, None) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_get_hit(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - sub_pb = 
Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - gax_api = _GAXSubscriberAPI(_get_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.subscription_get(self.SUB_PATH) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'pushConfig': { - 'pushEndpoint': self.PUSH_ENDPOINT, - }, - } - self.assertEqual(resource, expected) - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_get_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_get(self.SUB_PATH) - - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_get_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_get(self.SUB_PATH) - - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_delete_hit(self): - gax_api = _GAXSubscriberAPI(_delete_subscription_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_delete_subscription_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) 
- - with self.assertRaises(NotFound): - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_modify_push_config_hit(self): - gax_api = _GAXSubscriberAPI(_modify_push_config_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_modify_push_config(self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_modify_push_config_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_modify_push_config( - self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_modify_push_config_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_modify_push_config( - self.SUB_PATH, self.PUSH_ENDPOINT) 
- - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_pull_explicit(self): - import base64 - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud._helpers import _datetime_to_rfc3339 - - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - NOW_PB = _datetime_to_pb_timestamp(NOW) - NOW_RFC3339 = _datetime_to_rfc3339(NOW) - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = { - 'messageId': MSG_ID, - 'data': B64, - 'attributes': {'a': 'b'}, - 'publishTime': NOW_RFC3339, - } - RECEIVED = [{'ackId': ACK_ID, 'message': MESSAGE}] - message_pb = _PubsubMessagePB(MSG_ID, B64, {'a': 'b'}, NOW_PB) - response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)]) - gax_api = _GAXSubscriberAPI(_pull_response=response_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - MAX_MESSAGES = 10 - - received = api.subscription_pull( - self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) - - self.assertEqual(received, RECEIVED) - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, MAX_MESSAGES) - self.assertTrue(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_defaults_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_pull(self.SUB_PATH) - - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - 
self.assertEqual(max_messages, 1) - self.assertFalse(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_defaults_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_pull(self.SUB_PATH) - - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, 1) - self.assertFalse(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_deadline_exceeded(self): - client = _Client(self.PROJECT) - gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) - api = self._make_one(gax_api, client) - - result = api.subscription_pull(self.SUB_PATH) - self.assertEqual(result, []) - - def test_subscription_pull_deadline_exceeded_return_immediately(self): - from google.gax.errors import GaxError - - client = _Client(self.PROJECT) - gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_pull(self.SUB_PATH, return_immediately=True) - - def test_subscription_acknowledge_hit(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI(_acknowledge_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_acknowledge_miss(self): - from google.cloud.exceptions import NotFound - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - 
- with self.assertRaises(NotFound): - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_acknowledge_error(self): - from google.gax.errors import GaxError - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_hit(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI(_modify_ack_deadline_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_miss(self): - from google.cloud.exceptions import NotFound - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - 
self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_error(self): - from google.gax.errors import GaxError - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_list_snapshots_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Snapshot as SnapshotPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - from google.cloud.pubsub.topic import Topic - - local_snapshot_path = '%s/snapshots/%s' % ( - self.PROJECT_PATH, self.SNAPSHOT_NAME) - snapshot_pb = SnapshotPB( - name=local_snapshot_path, topic=self.TOPIC_PATH) - response = _GAXPageIterator([snapshot_pb]) - gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_snapshots(self.PROJECT) - snapshots = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the snapshot object returned. 
- self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot.topic, Topic) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - - def test_list_snapshots_with_paging(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Snapshot as SnapshotPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - local_snapshot_path = '%s/snapshots/%s' % ( - self.PROJECT_PATH, self.SNAPSHOT_NAME) - snapshot_pb = SnapshotPB(name=local_snapshot_path, topic=self.TOPIC_PATH) - response = _GAXPageIterator([snapshot_pb], page_token=NEW_TOKEN) - gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) - client = _Client(self.PROJECT) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_snapshots( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - snapshots = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, NEW_TOKEN) - # Check the snapshot object returned. 
- self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot.topic, Topic) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - - def test_subscription_seek_hit(self): - gax_api = _GAXSubscriberAPI(_seek_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_subscription_seek_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_seek_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_subscription_seek_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - 
self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_create(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Snapshot - - snapshot_pb = Snapshot(name=self.SNAPSHOT_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_snapshot_response=snapshot_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - expected = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_failed_precondition(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXSubscriberAPI(_create_snapshot_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_already_exists(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXSubscriberAPI(_create_snapshot_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_subscrption_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = 
_GAXSubscriberAPI(_snapshot_create_subscription_miss=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_hit(self): - gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - 
api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_publisher_api(_Base, unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.pubsub._gax import make_gax_publisher_api - - return make_gax_publisher_api(*args, **kwargs) - - def test_live_api(self): - from google.cloud.pubsub import __version__ - from google.cloud.pubsub._gax import DEFAULT_USER_AGENT - - channels = [] - publisher_api_kwargs = [] - channel_args = [] - channel_obj = object() - mock_result = object() - host = 'foo.apis.invalid' - - def mock_publisher_api(channel, **kwargs): - channels.append(channel) - publisher_api_kwargs.append(kwargs) - return mock_result - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - mock_publisher_api.SERVICE_ADDRESS = host - - creds = _make_credentials() - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - PublisherClient=mock_publisher_api, - make_secure_channel=make_channel) - with patch: - result = self._call_fut(creds) - - self.assertIs(result, mock_result) - self.assertEqual(len(publisher_api_kwargs), 1) - self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - def test_emulator(self): - from google.cloud.pubsub import __version__ - - channels = [] - publisher_api_kwargs = [] - mock_result = object() - insecure_args = [] - mock_channel = object() - - def mock_publisher_api(channel, **kwargs): - channels.append(channel) - publisher_api_kwargs.append(kwargs) - return mock_result - - def mock_insecure_channel(host): - insecure_args.append(host) - return mock_channel - - host = 'CURR_HOST:1234' - patch 
= mock.patch.multiple( - 'google.cloud.pubsub._gax', - PublisherClient=mock_publisher_api, - insecure_channel=mock_insecure_channel) - with patch: - result = self._call_fut(host=host) - - self.assertIs(result, mock_result) - self.assertEqual(len(publisher_api_kwargs), 1) - self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [mock_channel]) - self.assertEqual(insecure_args, [host]) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_subscriber_api(_Base, unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.pubsub._gax import make_gax_subscriber_api - - return make_gax_subscriber_api(*args, **kwargs) - - def test_live_api(self): - from google.cloud.pubsub import __version__ - from google.cloud.pubsub._gax import DEFAULT_USER_AGENT - - channels = [] - subscriber_api_kwargs = [] - channel_args = [] - channel_obj = object() - mock_result = object() - host = 'foo.apis.invalid' - - def mock_subscriber_api(channel, **kwargs): - channels.append(channel) - subscriber_api_kwargs.append(kwargs) - return mock_result - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - mock_subscriber_api.SERVICE_ADDRESS = host - - creds = _make_credentials() - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - SubscriberClient=mock_subscriber_api, - make_secure_channel=make_channel) - with patch: - result = self._call_fut(creds) - - self.assertIs(result, mock_result) - self.assertEqual(len(subscriber_api_kwargs), 1) - self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - def test_emulator(self): - from google.cloud.pubsub import __version__ - - channels = [] - subscriber_api_kwargs = [] - 
mock_result = object() - insecure_args = [] - mock_channel = object() - - def mock_subscriber_api(channel, **kwargs): - channels.append(channel) - subscriber_api_kwargs.append(kwargs) - return mock_result - - def mock_insecure_channel(host): - insecure_args.append(host) - return mock_channel - - host = 'CURR_HOST:1234' - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - SubscriberClient=mock_subscriber_api, - insecure_channel=mock_insecure_channel) - with patch: - result = self._call_fut(host=host) - - self.assertIs(result, mock_result) - self.assertEqual(len(subscriber_api_kwargs), 1) - self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [mock_channel]) - self.assertEqual(insecure_args, [host]) - - -class _GAXPublisherAPI(_GAXBaseAPI): - - _create_topic_failed_precondition = False - _create_topic_already_exists = False - - def list_topics(self, name, page_size, options): - self._list_topics_called_with = name, page_size, options - return self._list_topics_response - - def create_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._create_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if self._create_topic_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if self._create_topic_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - return self._create_topic_response - - def get_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._get_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_topic_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def delete_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._delete_topic_called_with = name, 
options - if self._random_gax_error: - raise GaxError('error') - if not self._delete_topic_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def publish(self, topic, messages, options=None): - from google.gax.errors import GaxError - - self._publish_called_with = topic, messages, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._publish_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def list_topic_subscriptions(self, topic, page_size, options=None): - from google.gax.errors import GaxError - - self._list_topic_subscriptions_called_with = topic, page_size, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._list_topic_subscriptions_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - -class _GAXSubscriberAPI(_GAXBaseAPI): - - _create_snapshot_already_exists = False - _create_snapshot_failed_precondition = False - _create_subscription_already_exists = False - _create_subscription_failed_precondition = False - _modify_push_config_ok = False - _acknowledge_ok = False - _modify_ack_deadline_ok = False - _deadline_exceeded_gax_error = False - _snapshot_create_subscription_miss=False - - def list_subscriptions(self, project, page_size, options=None): - self._list_subscriptions_called_with = (project, page_size, options) - return self._list_subscriptions_response - - def create_subscription(self, name, topic, push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - options=None): - from google.gax.errors import GaxError - - self._create_subscription_called_with = ( - name, topic, push_config, ack_deadline_seconds, - retain_acked_messages, message_retention_duration, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_subscription_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if 
self._create_subscription_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - return self._create_subscription_response - - def get_subscription(self, name, options=None): - from google.gax.errors import GaxError - - self._get_subscription_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_subscription_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def delete_subscription(self, name, options=None): - from google.gax.errors import GaxError - - self._delete_subscription_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if not self._delete_subscription_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def modify_push_config(self, name, push_config, options=None): - from google.gax.errors import GaxError - - self._modify_push_config_called_with = name, push_config, options - if self._random_gax_error: - raise GaxError('error') - if not self._modify_push_config_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def pull(self, name, max_messages, return_immediately, options=None): - from google.gax.errors import GaxError - - self._pull_called_with = ( - name, max_messages, return_immediately, options) - if self._random_gax_error: - raise GaxError('error') - if self._deadline_exceeded_gax_error: - raise GaxError('deadline exceeded', - self._make_grpc_deadline_exceeded()) - try: - return self._pull_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def acknowledge(self, name, ack_ids, options=None): - from google.gax.errors import GaxError - - self._acknowledge_called_with = name, ack_ids, options - if self._random_gax_error: - raise GaxError('error') - if not self._acknowledge_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def modify_ack_deadline(self, name, ack_ids, deadline, options=None): - from google.gax.errors import 
GaxError - - self._modify_ack_deadline_called_with = ( - name, ack_ids, deadline, options) - if self._random_gax_error: - raise GaxError('error') - if not self._modify_ack_deadline_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def list_snapshots(self, project, page_size, options=None): - self._list_snapshots_called_with = (project, page_size, options) - return self._list_snapshots_response - - def create_snapshot(self, name, subscription, options=None): - from google.gax.errors import GaxError - - self._create_snapshot_called_with = (name, subscription, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_snapshot_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - if self._create_snapshot_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if self._snapshot_create_subscription_miss: - raise GaxError('miss', self._make_grpc_not_found()) - - return self._create_snapshot_response - - def delete_snapshot(self, snapshot, options=None): - from google.gax.errors import GaxError - - self._delete_snapshot_called_with = (snapshot, options) - if self._random_gax_error: - raise GaxError('error') - if not self._delete_snapshot_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def seek(self, subscription, time=None, snapshot=None, options=None): - from google.gax.errors import GaxError - - self._seek_called_with = (subscription, time, snapshot, options) - if self._random_gax_error: - raise GaxError('error') - if not self._seek_ok: - raise GaxError('miss', self._make_grpc_not_found()) - -class _TopicPB(object): - - def __init__(self, name): - self.name = name - - -class _PublishResponsePB(object): - - def __init__(self, message_ids): - self.message_ids = message_ids - - -class _PubsubMessagePB(object): - - def __init__(self, message_id, data, attributes, publish_time): - self.message_id = message_id - self.data = data - self.attributes = attributes - 
self.publish_time = publish_time - - -class _ReceivedMessagePB(object): - - def __init__(self, ack_id, message): - self.ack_id = ack_id - self.message = message - - -class _PullResponsePB(object): - - def __init__(self, received_messages): - self.received_messages = received_messages - - -class _Client(object): - - def __init__(self, project): - self.project = project diff --git a/pubsub/tests/unit/test__helpers.py b/pubsub/tests/unit/test__helpers.py deleted file mode 100644 index 0503d68b20b9..000000000000 --- a/pubsub/tests/unit/test__helpers.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test_topic_name_from_path(unittest.TestCase): - - def _call_fut(self, path, project): - from google.cloud.pubsub._helpers import topic_name_from_path - - return topic_name_from_path(path, project) - - def test_w_simple_name(self): - TOPIC_NAME = 'TOPIC_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, TOPIC_NAME) - - def test_w_name_w_all_extras(self): - TOPIC_NAME = 'TOPIC_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, TOPIC_NAME) - - -class Test_subscription_name_from_path(unittest.TestCase): - - def _call_fut(self, path, project): - from google.cloud.pubsub._helpers import subscription_name_from_path - - return subscription_name_from_path(path, project) - - def test_w_simple_name(self): - SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - subscription_name = self._call_fut(PATH, PROJECT) - self.assertEqual(subscription_name, SUBSCRIPTION_NAME) - - def test_w_name_w_all_extras(self): - SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, SUBSCRIPTION_NAME) diff --git a/pubsub/tests/unit/test__http.py b/pubsub/tests/unit/test__http.py deleted file mode 100644 index 794fe093bbb3..000000000000 --- a/pubsub/tests/unit/test__http.py +++ /dev/null @@ -1,1165 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(unittest.TestCase): - PROJECT = 'PROJECT' - LIST_TOPICS_PATH = 'projects/%s/topics' % (PROJECT,) - LIST_SNAPSHOTS_PATH = 'projects/%s/snapshots' % (PROJECT,) - LIST_SUBSCRIPTIONS_PATH = 'projects/%s/subscriptions' % (PROJECT,) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -class TestConnection(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import Connection - - return Connection - - def test_default_url(self): - conn = self._make_one(object()) - klass = self._get_target_class() - self.assertEqual(conn.api_base_url, klass.API_BASE_URL) - - def test_custom_url_from_env(self): - from google.cloud.environment_vars import PUBSUB_EMULATOR - - HOST = 'localhost:8187' - fake_environ = {PUBSUB_EMULATOR: HOST} - - with mock.patch('os.environ', new=fake_environ): - conn = self._make_one(object()) - - klass = self._get_target_class() - self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) - self.assertEqual(conn.api_base_url, 'http://' + HOST) - - def 
test_build_api_url_no_extra_query_params(self): - conn = self._make_one(object()) - URI = '/'.join([ - conn.API_BASE_URL, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo'), URI) - - def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - - conn = self._make_one(object()) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - self.assertEqual(path, - '/'.join(['', conn.API_VERSION, 'foo'])) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') - - def test_build_api_url_w_base_url_override(self): - base_url1 = 'api-base-url1' - base_url2 = 'api-base-url2' - conn = self._make_one(object()) - conn.api_base_url = base_url1 - URI = '/'.join([ - base_url2, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo', api_base_url=base_url2), - URI) - - def test_extra_headers(self): - import requests - - from google.cloud import _http as base_http - from google.cloud.pubsub import _http as MUT - - http = mock.create_autospec(requests.Session, instance=True) - response = requests.Response() - response.status_code = 200 - data = b'brent-spiner' - response._content = data - http.request.return_value = response - client = mock.Mock(_http=http, spec=['_http']) - - conn = self._make_one(client) - req_data = 'req-data-boring' - result = conn.api_request( - 'GET', '/rainbow', data=req_data, expect_json=False) - self.assertEqual(result, data) - - expected_headers = { - 'Accept-Encoding': 'gzip', - base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, - 'User-Agent': conn.USER_AGENT, - } - expected_uri = conn.build_api_url('/rainbow') - http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method='GET', - url=expected_uri, - ) - - -class Test_PublisherAPI(_Base): - - @staticmethod - def 
_get_target_class(): - from google.cloud.pubsub._http import _PublisherAPI - - return _PublisherAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_topics_no_paging(self): - from google.cloud.pubsub.topic import Topic - - returned = {'topics': [{'name': self.TOPIC_PATH}]} - connection = _Connection(returned) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_topics_with_paging(self): - import six - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - RETURNED = { - 'topics': [{'name': self.TOPIC_PATH}], - 'nextPageToken': 'TOKEN2', - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - topics = list(page) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, 
TOKEN2) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_list_topics_missing_key(self): - returned = {} - connection = _Connection(returned) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_create(self): - RETURNED = {'name': self.TOPIC_PATH} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_create(self.TOPIC_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_create_already_exists(self): - from google.cloud.exceptions import Conflict - - connection = _Connection() - connection._no_response_error = Conflict - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_get_hit(self): - RETURNED = {'name': self.TOPIC_PATH} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_get(self.TOPIC_PATH) - - 
self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_get_miss(self): - from google.cloud.exceptions import NotFound - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_get(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_delete_hit(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.topic_delete(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_delete_miss(self): - from google.cloud.exceptions import NotFound - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_delete(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_publish_hit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} - RETURNED = {'messageIds': [MSGID]} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(resource, [MSGID]) - self.assertEqual(connection._called_with['method'], 'POST') - path = 
'/%s:publish' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'messages': [B64MSG]}) - msg_data = connection._called_with['data']['messages'][0]['data'] - self.assertEqual(msg_data, B64_PAYLOAD) - - def test_topic_publish_twice(self): - import base64 - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - RETURNED = {'messageIds': []} - connection = _Connection(RETURNED, RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - messages = connection._called_with['data']['messages'] - self.assertEqual(len(messages), 1) - self.assertEqual(messages[0]['data'], B64_PAYLOAD) - - def test_topic_publish_miss(self): - import base64 - from google.cloud.exceptions import NotFound - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:publish' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'messages': [B64MSG]}) - - def test_topic_list_subscriptions_no_paging(self): - from google.cloud.pubsub.topic import Topic - from google.cloud.pubsub.subscription import Subscription - - local_sub_path = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, self.SUB_NAME) - RETURNED = {'subscriptions': [local_sub_path]} - connection = _Connection(RETURNED) - client = _Client(connection, 
self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - local_sub_path = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, self.SUB_NAME) - RETURNED = { - 'subscriptions': [local_sub_path], - 'nextPageToken': TOKEN2, - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions( - topic, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - subscriptions = list(page) - next_token = iterator.next_page_token - - self.assertEqual(next_token, TOKEN2) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - 
self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_topic_list_subscriptions_missing_key(self): - from google.cloud.pubsub.topic import Topic - - connection = _Connection({}) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_list_subscriptions_miss(self): - from google.cloud.exceptions import NotFound - from google.cloud.pubsub.topic import Topic - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - topic = Topic(self.TOPIC_NAME, client) - list(api.topic_list_subscriptions(topic)) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - -class Test_SubscriberAPI(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import _SubscriberAPI - - return _SubscriberAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_subscriptions_no_paging(self): - from google.cloud.pubsub.client import Client - from 
google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = {'subscriptions': [SUB_INFO]} - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': 'TOKEN2', - } - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_subscriptions( - 
self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - subscriptions = list(page) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, TOKEN2) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_list_subscriptions_missing_key(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_subscription_create_defaults(self): - RESOURCE = {'topic': self.TOPIC_PATH} - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - 
self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_create_retain_messages(self): - import datetime - - RESOURCE = {'topic': self.TOPIC_PATH, - 'retainAckedMessages': True, - 'messageRetentionDuration': { - 'seconds': 1729, - 'nanos': 2718 * 1000 - } - } - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, - retain_acked_messages=True, - message_retention_duration=datetime.timedelta( - seconds=1729, microseconds=2718)) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_create_explicit(self): - ACK_DEADLINE = 90 - PUSH_ENDPOINT = 'https://api.example.com/push' - RESOURCE = { - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': { - 'pushEndpoint': PUSH_ENDPOINT, - }, - } - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, - ack_deadline=ACK_DEADLINE, push_endpoint=PUSH_ENDPOINT) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_get(self): - ACK_DEADLINE = 90 - PUSH_ENDPOINT 
= 'https://api.example.com/push' - RETURNED = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_get(self.SUB_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_subscription_delete(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_delete(self.SUB_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_subscription_modify_push_config(self): - PUSH_ENDPOINT = 'https://api.example.com/push' - BODY = { - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_modify_push_config(self.SUB_PATH, PUSH_ENDPOINT) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:modifyPushConfig' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_pull_defaults(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} - RETURNED = { - 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - BODY = { - 
'returnImmediately': False, - 'maxMessages': 1, - } - - received = api.subscription_pull(self.SUB_PATH) - - self.assertEqual(received, RETURNED['receivedMessages']) - self.assertEqual(received[0]['message']['data'], PAYLOAD) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:pull' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_pull_explicit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} - RETURNED = { - 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - MAX_MESSAGES = 10 - BODY = { - 'returnImmediately': True, - 'maxMessages': MAX_MESSAGES, - } - - received = api.subscription_pull( - self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) - - self.assertEqual(received, RETURNED['receivedMessages']) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:pull' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_acknowledge(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - BODY = { - 'ackIds': [ACK_ID1, ACK_ID2], - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:acknowledge' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def 
test_subscription_modify_ack_deadline(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - BODY = { - 'ackIds': [ACK_ID1, ACK_ID2], - 'ackDeadlineSeconds': NEW_DEADLINE, - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:modifyAckDeadline' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_list_snapshots_no_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - local_topic_path = 'projects/%s/topics/%s' % ( - self.PROJECT, self.TOPIC_NAME) - RETURNED = {'snapshots': [{ - 'name': local_snapshot_path, - 'topic': local_topic_path, - }], - } - - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_snapshots(self.PROJECT) - snapshots = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_snapshots_with_paging(self): - import six - - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot 
import Snapshot - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - local_topic_path = 'projects/%s/topics/%s' % ( - self.PROJECT, self.TOPIC_NAME) - RETURNED = { - 'snapshots': [{ - 'name': local_snapshot_path, - 'topic': local_topic_path, - }], - 'nextPageToken': TOKEN2, - } - - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_snapshots( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - snapshots = list(page) - next_token = iterator.next_page_token - - self.assertEqual(next_token, TOKEN2) - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_subscription_seek_snapshot(self): - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - RETURNED = {} - BODY = { - 'snapshot': local_snapshot_path - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_seek( - self.SUB_PATH, snapshot=local_snapshot_path) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:seek' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_seek_time(self): - time = '12345' - RETURNED = {} - BODY = { - 'time': time - } - connection = 
_Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_seek(self.SUB_PATH, time=time) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:seek' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_create(self): - RETURNED = { - 'name': self.SNAPSHOT_PATH, - 'subscription': self.SUB_PATH - } - BODY = { - 'subscription': self.SUB_PATH - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_create_already_exists(self): - from google.cloud.exceptions import NotFound - - BODY = { - 'subscription': self.SUB_PATH - } - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_delete(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.snapshot_delete(self.SNAPSHOT_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - - -class Test_IAMPolicyAPI(_Base): - - @staticmethod - def 
_get_target_class(): - from google.cloud.pubsub._http import _IAMPolicyAPI - - return _IAMPolicyAPI - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, None) - api = self._make_one(client) - self.assertEqual(api.api_request, connection.api_request) - - def test_get_iam_policy(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - RETURNED = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - policy = api.get_iam_policy(self.TOPIC_PATH) - - self.assertEqual(policy, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s:getIamPolicy' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_set_iam_policy(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } - RETURNED 
= POLICY.copy() - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - policy = api.set_iam_policy(self.TOPIC_PATH, POLICY) - - self.assertEqual(policy, RETURNED) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:setIamPolicy' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'policy': POLICY}) - - def test_test_iam_permissions(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = ALL_ROLES[1:] - RETURNED = {'permissions': ALLOWED} - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) - - self.assertEqual(allowed, ALLOWED) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'permissions': ALL_ROLES}) - - def test_test_iam_permissions_missing_key(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) - - self.assertEqual(allowed, []) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'permissions': ALL_ROLES}) - - -class 
Test__transform_messages_base64_empty(unittest.TestCase): - def _call_fut(self, messages, transform, key=None): - from google.cloud.pubsub._http import _transform_messages_base64 - - return _transform_messages_base64(messages, transform, key) - - def test__transform_messages_base64_empty_message(self): - from base64 import b64decode - - DATA = [{'message': {}}] - self._call_fut(DATA, b64decode, 'message') - self.assertEqual(DATA, [{'message': {}}]) - - def test__transform_messages_base64_empty_data(self): - from base64 import b64decode - - DATA = [{'message': {'data': b''}}] - self._call_fut(DATA, b64decode, 'message') - self.assertEqual(DATA, [{'message': {'data': b''}}]) - - def test__transform_messages_base64_pull(self): - from base64 import b64encode - - DATA = [{'message': {'data': b'testing 1 2 3'}}] - self._call_fut(DATA, b64encode, 'message') - self.assertEqual(DATA[0]['message']['data'], - b64encode(b'testing 1 2 3')) - - def test__transform_messages_base64_publish(self): - from base64 import b64encode - - DATA = [{'data': b'testing 1 2 3'}] - self._call_fut(DATA, b64encode) - self.assertEqual(DATA[0]['data'], b64encode(b'testing 1 2 3')) - - -class _Connection(object): - - _called_with = None - _no_response_error = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - from google.cloud.exceptions import NotFound - - self._called_with = kw - try: - response, self._responses = self._responses[0], self._responses[1:] - except IndexError: - err_class = self._no_response_error or NotFound - raise err_class('miss') - return response - - -class _Client(object): - - def __init__(self, connection, project): - self._connection = connection - self.project = project diff --git a/pubsub/tests/unit/test_client.py b/pubsub/tests/unit/test_client.py deleted file mode 100644 index 407683606330..000000000000 --- a/pubsub/tests/unit/test_client.py +++ /dev/null @@ -1,462 +0,0 @@ -# Copyright 2015 Google Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestClient(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.client import Client - - return Client - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_publisher_api_wo_gax(self): - from google.cloud.pubsub._http import _PublisherAPI - - creds = _make_credentials() - - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=False) - - conn = client._connection = _Connection() - api = client.publisher_api - - self.assertIsInstance(api, _PublisherAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.publisher_api - self.assertIs(again, api) - - def test_no_gax_ctor(self): - from google.cloud.pubsub._http import _PublisherAPI - - creds = _make_credentials() - with mock.patch('google.cloud.pubsub.client._USE_GRPC', - new=True): - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - - self.assertFalse(client._use_grpc) - api = 
client.publisher_api - self.assertIsInstance(api, _PublisherAPI) - - def _publisher_api_w_gax_helper(self, emulator=False): - from google.cloud.pubsub import _http - - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxPublisherAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self._client = client - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=True) - client._connection.in_emulator = emulator - - patch = mock.patch.multiple( - 'google.cloud.pubsub.client', - make_gax_publisher_api=_generated_api, - GAXPublisherAPI=_GaxPublisherAPI) - with patch: - api = client.publisher_api - - self.assertIsInstance(api, _GaxPublisherAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api._client, client) - # API instance is cached - again = client.publisher_api - self.assertIs(again, api) - if emulator: - kwargs = {'host': _http.Connection.API_BASE_URL} - else: - kwargs = {'credentials': creds} - self.assertEqual(_called_with, [((), kwargs)]) - - def test_publisher_api_w_gax(self): - self._publisher_api_w_gax_helper() - - def test_publisher_api_w_gax_and_emulator(self): - self._publisher_api_w_gax_helper(emulator=True) - - def test_subscriber_api_wo_gax(self): - from google.cloud.pubsub._http import _SubscriberAPI - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=False) - - conn = client._connection = _Connection() - api = client.subscriber_api - - self.assertIsInstance(api, _SubscriberAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.subscriber_api - self.assertIs(again, api) - - def _subscriber_api_w_gax_helper(self, emulator=False): - from google.cloud.pubsub import _http - - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - 
_called_with.append((args, kw)) - return wrapped - - class _GaxSubscriberAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self._client = client - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=True) - client._connection.in_emulator = emulator - - patch = mock.patch.multiple( - 'google.cloud.pubsub.client', - make_gax_subscriber_api=_generated_api, - GAXSubscriberAPI=_GaxSubscriberAPI) - with patch: - api = client.subscriber_api - - self.assertIsInstance(api, _GaxSubscriberAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api._client, client) - # API instance is cached - again = client.subscriber_api - self.assertIs(again, api) - if emulator: - kwargs = {'host': _http.Connection.API_BASE_URL} - else: - kwargs = {'credentials': creds} - self.assertEqual(_called_with, [((), kwargs)]) - - def test_subscriber_api_w_gax(self): - self._subscriber_api_w_gax_helper() - - def test_subscriber_api_w_gax_and_emulator(self): - self._subscriber_api_w_gax_helper(emulator=True) - - def test_iam_policy_api(self): - from google.cloud.pubsub._http import _IAMPolicyAPI - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = _Connection() - - api = client.iam_policy_api - self.assertIsInstance(api, _IAMPolicyAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.iam_policy_api - self.assertIs(again, api) - - def test_list_topics_no_paging(self): - from google.cloud.pubsub.topic import Topic - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) - client._publisher_api = api - - iterator = client.list_topics() - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - 
self.assertIsInstance(topics[0], Topic) - self.assertEqual(topics[0].name, self.TOPIC_NAME) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) - - def test_list_topics_with_paging(self): - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, client)], TOKEN2) - client._publisher_api = api - - iterator = client.list_topics(SIZE, TOKEN1) - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - self.assertIsInstance(topics[0], Topic) - self.assertEqual(topics[0].name, self.TOPIC_NAME) - self.assertEqual(next_page_token, TOKEN2) - - self.assertEqual(api._listed_topics, (self.PROJECT, 1, TOKEN1)) - - def test_list_topics_missing_key(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI() - client._publisher_api = api - - iterator = client.list_topics() - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 0) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) - - def test_list_subscriptions_no_paging(self): - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - returned = {'subscriptions': [SUB_INFO]} - client._connection = _Connection(returned) - - iterator = client.list_subscriptions() - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - # Check the token returned. 
- self.assertIsNone(next_page_token) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - called_with = client._connection._called_with - expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': expected_path, - 'query_params': {}, - }) - - def test_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - - # Set up the mock response. - ACK_DEADLINE = 42 - PUSH_ENDPOINT = 'https://push.example.com/endpoint' - SUB_INFO = {'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}} - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - returned = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': TOKEN2, - } - client._connection = _Connection(returned) - - iterator = client.list_subscriptions( - SIZE, TOKEN1) - page = six.next(iterator.pages) - subscriptions = list(page) - next_page_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_page_token, TOKEN2) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, ACK_DEADLINE) - self.assertEqual(subscription.push_endpoint, PUSH_ENDPOINT) - - called_with = client._connection._called_with - expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': expected_path, - 'query_params': { - 'pageSize': SIZE, - 'pageToken': TOKEN1, - }, - }) - - def test_list_subscriptions_w_missing_key(self): - PROJECT = 'PROJECT' - creds = _make_credentials() - - client = self._make_one(project=PROJECT, credentials=creds) - client._connection = object() - api = client._subscriber_api = _FauxSubscriberAPI() - api._list_subscriptions_response = (), None - - subscriptions, next_page_token = client.list_subscriptions() - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_subscriptions, - (self.PROJECT, None, None)) - - def test_list_snapshots(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxSubscriberAPI() - response = api._list_snapshots_response = object() - client._subscriber_api = api - self.assertEqual(client.list_snapshots(), response) - self.assertEqual(api._listed_snapshots, (self.PROJECT, None, None)) - - def test_topic_factory(self): - PROJECT = 'PROJECT' - TOPIC_NAME = 'TOPIC_NAME' - creds = _make_credentials() - - client_obj = self._make_one(project=PROJECT, credentials=creds) - new_topic = client_obj.topic(TOPIC_NAME) - self.assertEqual(new_topic.name, TOPIC_NAME) - 
self.assertIs(new_topic._client, client_obj) - self.assertEqual(new_topic.project, PROJECT) - self.assertEqual(new_topic.full_name, - 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)) - self.assertFalse(new_topic.timestamp_messages) - - def test_subscription_factory(self): - project = 'PROJECT' - creds = _make_credentials() - client_obj = self._make_one(project=project, credentials=creds) - - sub_name = 'hoot-n-holler' - ack_deadline = 60, - push_endpoint = 'https://api.example.com/push' - message_retention_duration = datetime.timedelta(3600) - new_subscription = client_obj.subscription( - sub_name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=True, - message_retention_duration=message_retention_duration) - - self.assertEqual(new_subscription.name, sub_name) - self.assertIsNone(new_subscription.topic) - self.assertIs(new_subscription._client, client_obj) - self.assertEqual(new_subscription.project, project) - self.assertEqual(new_subscription.ack_deadline, ack_deadline) - self.assertEqual(new_subscription.push_endpoint, push_endpoint) - self.assertTrue(new_subscription.retain_acked_messages) - self.assertEqual( - new_subscription.message_retention_duration, - message_retention_duration) - - -class _Iterator(object): - - def __init__(self, items, token): - self._items = items or () - self.next_page_token = token - - def __iter__(self): - return iter(self._items) - - -class _FauxPublisherAPI(object): - - def __init__(self, items=None, token=None): - self._items = items - self._token = token - - def list_topics(self, project, page_size, page_token): - self._listed_topics = (project, page_size, page_token) - return _Iterator(self._items, self._token) - - -class _FauxSubscriberAPI(object): - - def list_subscriptions(self, project, page_size, page_token): - self._listed_subscriptions = (project, page_size, page_token) - return self._list_subscriptions_response - - def list_snapshots(self, project, page_size, page_token): - 
self._listed_snapshots = (project, page_size, page_token) - return self._list_snapshots_response - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response diff --git a/pubsub/tests/unit/test_iam.py b/pubsub/tests/unit/test_iam.py deleted file mode 100644 index 475d375d0cd8..000000000000 --- a/pubsub/tests/unit/test_iam.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class TestPolicy(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.iam import Policy - - return Policy - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - empty = frozenset() - policy = self._make_one() - self.assertIsNone(policy.etag) - self.assertIsNone(policy.version) - self.assertEqual(policy.owners, empty) - self.assertEqual(policy.editors, empty) - self.assertEqual(policy.viewers, empty) - self.assertEqual(policy.publishers, empty) - self.assertEqual(policy.subscribers, empty) - - def test_ctor_explicit(self): - VERSION = 17 - ETAG = 'ETAG' - empty = frozenset() - policy = self._make_one(ETAG, VERSION) - self.assertEqual(policy.etag, ETAG) - self.assertEqual(policy.version, VERSION) - self.assertEqual(policy.owners, empty) - self.assertEqual(policy.editors, empty) - self.assertEqual(policy.viewers, empty) - self.assertEqual(policy.publishers, empty) - self.assertEqual(policy.subscribers, empty) - - def test_publishers_setter(self): - import warnings - from google.cloud.pubsub.iam import ( - PUBSUB_PUBLISHER_ROLE, - ) - PUBLISHER = 'user:phred@example.com' - expected = set([PUBLISHER]) - policy = self._make_one() - with warnings.catch_warnings(): - policy.publishers = [PUBLISHER] - - self.assertEqual(policy.publishers, frozenset(expected)) - self.assertEqual( - dict(policy), {PUBSUB_PUBLISHER_ROLE: expected}) - - def test_subscribers_setter(self): - import warnings - from google.cloud.pubsub.iam import ( - PUBSUB_SUBSCRIBER_ROLE, - ) - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - expected = set([SUBSCRIBER]) - policy = self._make_one() - with warnings.catch_warnings(): - policy.subscribers = [SUBSCRIBER] - - self.assertEqual(policy.subscribers, frozenset(expected)) - self.assertEqual( - dict(policy), {PUBSUB_SUBSCRIBER_ROLE: expected}) diff --git a/pubsub/tests/unit/test_message.py 
b/pubsub/tests/unit/test_message.py deleted file mode 100644 index b4f6abfbb1b2..000000000000 --- a/pubsub/tests/unit/test_message.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class TestMessage(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.message import Message - - return Message - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - message = self._make_one(data=DATA, message_id=MESSAGE_ID) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertIsNone(message.service_timestamp) - - def test_ctor_w_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - ATTRS = {'a': 'b'} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, ATTRS) - self.assertIsNone(message.service_timestamp) - - def test_timestamp_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - message = self._make_one(data=DATA, message_id=MESSAGE_ID) - - def _to_fail(): - return message.timestamp - - self.assertRaises(ValueError, _to_fail) - - def 
test_timestamp_wo_timestamp_in_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - ATTRS = {'a': 'b'} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - - def _to_fail(): - return message.timestamp - - self.assertRaises(ValueError, _to_fail) - - def test_timestamp_w_timestamp_in_attributes(self): - from datetime import datetime - from google.cloud._helpers import _RFC3339_MICROS - from google.cloud._helpers import UTC - - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - TIMESTAMP = '2015-04-10T18:42:27.131956Z' - naive = datetime.strptime(TIMESTAMP, _RFC3339_MICROS) - timestamp = naive.replace(tzinfo=UTC) - ATTRS = {'timestamp': TIMESTAMP} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - self.assertEqual(message.timestamp, timestamp) - - def test_from_api_repr_missing_data(self): - MESSAGE_ID = '12345' - api_repr = {'messageId': MESSAGE_ID} - message = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(message.data, b'') - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertIsNone(message.service_timestamp) - - def test_from_api_repr_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = '12345' - TIMESTAMP = '2016-03-18-19:38:22.001393427Z' - api_repr = { - 'data': DATA, - 'messageId': MESSAGE_ID, - 'publishTime': TIMESTAMP, - } - message = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertEqual(message.service_timestamp, TIMESTAMP) - - def test_from_api_repr_w_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = '12345' - ATTRS = {'a': 'b'} - TIMESTAMP = '2016-03-18-19:38:22.001393427Z' - api_repr = { - 'data': DATA, - 'messageId': MESSAGE_ID, - 'publishTime': TIMESTAMP, - 'attributes': ATTRS, - } - message = self._get_target_class().from_api_repr(api_repr) - 
self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.service_timestamp, TIMESTAMP) - self.assertEqual(message.attributes, ATTRS) diff --git a/pubsub/tests/unit/test_pubsub.py b/pubsub/tests/unit/test_pubsub.py new file mode 100644 index 000000000000..605dbddd7601 --- /dev/null +++ b/pubsub/tests/unit/test_pubsub.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud import pubsub +from google.cloud import pubsub_v1 + + +def test_exported_things(): + assert pubsub.PublisherClient is pubsub_v1.PublisherClient + assert pubsub.SubscriberClient is pubsub_v1.SubscriberClient + assert pubsub.types is pubsub_v1.types diff --git a/pubsub/tests/unit/test_snpashot.py b/pubsub/tests/unit/test_snpashot.py deleted file mode 100644 index 5834a1fedd89..000000000000 --- a/pubsub/tests/unit/test_snpashot.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -class TestSnapshot(unittest.TestCase): - PROJECT = 'PROJECT' - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.snapshot import Snapshot - - return Snapshot - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - client = _Client(project=self.PROJECT) - snapshot = self._make_one(self.SNAPSHOT_NAME, - client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) - - def test_ctor_w_subscription(self): - client = _Client(project=self.PROJECT) - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, - subscription=subscription) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) - - def test_ctor_error(self): - client = _Client(project=self.PROJECT) - subscription = _Subscription(name=self.SUB_NAME, client=client) - with self.assertRaises(TypeError): - snapshot = self._make_one(self.SNAPSHOT_NAME, - client=client, - subscription=subscription) - - def test_from_api_repr_no_topics(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - 
klass = self._get_target_class() - snapshot = klass.from_api_repr(resource, client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertIsInstance(snapshot.topic, Topic) - - def test_from_api_repr_w_deleted_topic(self): - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': klass._DELETED_TOPIC_PATH - } - snapshot = klass.from_api_repr(resource, client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertIsNone(snapshot.topic) - - def test_from_api_repr_w_topics_w_no_topic_match(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - topics = {} - snapshot = klass.from_api_repr(resource, client=client, topics=topics) - topic = snapshot.topic - self.assertIsInstance(topic, Topic) - self.assertIs(topic, topics[self.TOPIC_PATH]) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - - def test_from_api_repr_w_topics_w_topic_match(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - topic = _Topic(self.TOPIC_NAME, client=client) - topics = {self.TOPIC_PATH: topic} - snapshot = klass.from_api_repr(resource, client=client, topics=topics) - self.assertIs(snapshot.topic, topic) - - def test_create_w_bound_client_error(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = 
_FauxSubscriberAPI() - expected_response = api._snapshot_create_response = object() - snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) - - with self.assertRaises(RuntimeError): - snapshot.create() - - def test_create_w_bound_subscription(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.create() - - self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) - - def test_create_w_bound_subscription_w_alternate_client(self): - client = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.create(client=client2) - - self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) - - snapshot.delete() - - self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) - - def test_delete_w_alternate_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.delete() - - self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) - - -class _Client(object): - - 
connection = None - - def __init__(self, project): - self.project = project - - def topic(self, name): - from google.cloud.pubsub.topic import Topic - - return Topic(name, client=self) - - -class _Topic(object): - - def __init__(self, name, client): - self._client = client - - -class _Subscription(object): - - def __init__(self, name, client=None): - self._client = client - self.full_name = 'projects/%s/subscriptions/%s' % ( - client.project, name, ) - - -class _FauxSubscriberAPI(object): - - def snapshot_create(self, snapshot_path, subscription_path): - self._snapshot_created = (snapshot_path, subscription_path, ) - - def snapshot_delete(self, snapshot_path): - self._snapshot_deleted = (snapshot_path, ) - - diff --git a/pubsub/tests/unit/test_subscription.py b/pubsub/tests/unit/test_subscription.py deleted file mode 100644 index ddf0ea439d77..000000000000 --- a/pubsub/tests/unit/test_subscription.py +++ /dev/null @@ -1,957 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -class TestSubscription(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'sub_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - DEADLINE = 42 - ENDPOINT = 'https://api.example.com/push' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.subscription import Subscription - - return Subscription - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - def test_ctor_explicit(self): - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_ctor_w_client_wo_topic(self): - client = _Client(project=self.PROJECT) - subscription = self._make_one(self.SUB_NAME, client=client) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsNone(subscription.topic) - - def test_ctor_w_both_topic_and_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client1) - with self.assertRaises(TypeError): - self._make_one(self.SUB_NAME, topic, client=client2) - - def 
test_ctor_w_neither_topic_nor_client(self): - with self.assertRaises(TypeError): - self._make_one(self.SUB_NAME) - - def test_from_api_repr_no_topics(self): - from google.cloud.pubsub.topic import Topic - - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client) - self.assertEqual(subscription.name, self.SUB_NAME) - topic = subscription.topic - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_deleted_topic(self): - klass = self._get_target_class() - resource = {'topic': klass._DELETED_TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsNone(subscription.topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_topics_no_topic_match(self): - from google.cloud.pubsub.topic import Topic - - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - topics = {} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client, topics=topics) - self.assertEqual(subscription.name, self.SUB_NAME) - topic = subscription.topic - self.assertIsInstance(topic, Topic) - self.assertIs(topic, 
topics[self.TOPIC_PATH]) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_topics_w_topic_match(self): - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - topics = {self.TOPIC_PATH: topic} - klass = self._get_target_class() - subscription = klass.from_api_repr(resource, client, topics=topics) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_full_name_and_path(self): - PROJECT = 'PROJECT' - SUB_FULL = 'projects/%s/subscriptions/%s' % (PROJECT, self.SUB_NAME) - SUB_PATH = '/%s' % (SUB_FULL,) - TOPIC_NAME = 'topic_name' - CLIENT = _Client(project=PROJECT) - topic = _Topic(TOPIC_NAME, client=CLIENT) - subscription = self._make_one(self.SUB_NAME, topic) - self.assertEqual(subscription.full_name, SUB_FULL) - self.assertEqual(subscription.path, SUB_PATH) - - def test_autoack_defaults(self): - from google.cloud.pubsub.subscription import AutoAck - - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - auto_ack = subscription.auto_ack() - self.assertIsInstance(auto_ack, AutoAck) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, False) - self.assertEqual(auto_ack._max_messages, 1) - self.assertIsNone(auto_ack._client) - - def test_autoack_explicit(self): - from google.cloud.pubsub.subscription import AutoAck - - client1 = _Client(project=self.PROJECT) - client2 = 
_Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - auto_ack = subscription.auto_ack(True, 10, client2) - self.assertIsInstance(auto_ack, AutoAck) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, True) - self.assertEqual(auto_ack._max_messages, 10) - self.assertIs(auto_ack._client, client2) - - def test_create_pull_wo_ack_deadline_w_bound_client(self): - RESPONSE = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_create_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.create() - - self.assertEqual( - api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, None, None, None, None)) - - def test_create_push_w_ack_deadline_w_alternate_client(self): - RESPONSE = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT} - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_create_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.create(client=client2) - - self.assertEqual( - api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, self.DEADLINE, self.ENDPOINT, - None, None)) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - self.assertFalse(subscription.exists()) - - 
self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_exists_hit_w_alternate_client(self): - RESPONSE = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - self.assertTrue(subscription.exists(client=client2)) - - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_reload_w_bound_client(self): - RESPONSE = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.reload() - - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_reload_sets_topic(self): - from google.cloud.pubsub.topic import Topic - - response = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = response - subscription = self._make_one(self.SUB_NAME, client=client) - - self.assertIsNone(subscription.topic) - subscription.reload() - - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_got, self.SUB_PATH) - self.assertIsInstance(subscription.topic, Topic) - 
self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - - def test_reload_w_alternate_client(self): - RESPONSE = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.reload(client=client2) - - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_delete_w_bound_client(self): - RESPONSE = {} - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_delete_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.delete() - - self.assertEqual(api._subscription_deleted, self.SUB_PATH) - - def test_delete_w_alternate_client(self): - RESPONSE = {} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_delete_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.delete(client=client2) - - self.assertEqual(api._subscription_deleted, self.SUB_PATH) - - def test_modify_push_config_w_endpoint_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_push_config_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_push_configuration(push_endpoint=self.ENDPOINT) - - 
self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_modified_push_config, - (self.SUB_PATH, self.ENDPOINT)) - - def test_modify_push_config_wo_endpoint_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_push_config_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - push_endpoint=self.ENDPOINT) - - subscription.modify_push_configuration(push_endpoint=None, - client=client2) - - self.assertIsNone(subscription.push_endpoint) - self.assertEqual(api._subscription_modified_push_config, - (self.SUB_PATH, None)) - - def test_pull_wo_return_immediately_max_messages_w_bound_client(self): - from google.cloud.pubsub.message import Message - - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - PAYLOAD = b'This is the message text' - MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD} - REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = [REC_MESSAGE] - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull() - - self.assertEqual(len(pulled), 1) - ack_id, message = pulled[0] - self.assertEqual(ack_id, ACK_ID) - self.assertIsInstance(message, Message) - self.assertEqual(message.data, PAYLOAD) - self.assertEqual(message.message_id, MSG_ID) - self.assertEqual(message.attributes, {}) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, False, 1)) - - def test_pull_w_return_immediately_w_max_messages_w_alt_client(self): - from google.cloud.pubsub.message import Message - - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - PAYLOAD = b'This is the message text' - MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD, - 'attributes': {'a': 
'b'}} - REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = [REC_MESSAGE] - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull(return_immediately=True, max_messages=3, - client=client2) - - self.assertEqual(len(pulled), 1) - ack_id, message = pulled[0] - self.assertEqual(ack_id, ACK_ID) - self.assertIsInstance(message, Message) - self.assertEqual(message.data, PAYLOAD) - self.assertEqual(message.message_id, MSG_ID) - self.assertEqual(message.attributes, {'a': 'b'}) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, True, 3)) - - def test_pull_wo_receivedMessages(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull(return_immediately=False) - - self.assertEqual(len(pulled), 0) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, False, 1)) - - def test_acknowledge_w_bound_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_acknowlege_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.acknowledge([ACK_ID1, ACK_ID2]) - - self.assertEqual(api._subscription_acked, - (self.SUB_PATH, [ACK_ID1, ACK_ID2])) - - def test_acknowledge_w_alternate_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_acknowlege_response 
= {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.acknowledge([ACK_ID1, ACK_ID2], client=client2) - - self.assertEqual(api._subscription_acked, - (self.SUB_PATH, [ACK_ID1, ACK_ID2])) - - def test_modify_ack_deadline_w_bound_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_ack_deadline_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_ack_deadline([ACK_ID1, ACK_ID2], self.DEADLINE) - - self.assertEqual(api._subscription_modified_ack_deadline, - (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) - - def test_modify_ack_deadline_w_alternate_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_ack_deadline_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_ack_deadline( - [ACK_ID1, ACK_ID2], self.DEADLINE, client=client2) - - self.assertEqual(api._subscription_modified_ack_deadline, - (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) - - def test_snapshot(self): - from google.cloud.pubsub.snapshot import Snapshot - - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - snapshot = subscription.snapshot(self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot.topic, topic) - - def test_seek_snapshot_w_bound_client(self): - from google.cloud.pubsub.snapshot import Snapshot - - client = _Client(project=self.PROJECT) - snapshot = Snapshot - 
snapshot = Snapshot(self.SNAPSHOT_NAME, client=client) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_snapshot(snapshot) - - self.assertEqual(api._subscription_seeked, - (self.SUB_PATH, None, self.SNAPSHOT_PATH)) - - def test_seek_snapshot_w_alternate_client(self): - from google.cloud.pubsub.snapshot import Snapshot - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - snapshot = Snapshot(self.SNAPSHOT_NAME, client=client1) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_snapshot(snapshot, client=client2) - - self.assertEqual(api._subscription_seeked, - (self.SUB_PATH, None, self.SNAPSHOT_PATH)) - - def test_seek_time_w_bound_client(self): - import datetime - - from google.cloud import _helpers - - time = datetime.time() - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_timestamp(time) - - self.assertEqual( - api._subscription_seeked, - (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), None)) - - def test_seek_time_w_alternate_client(self): - import datetime - - from google.cloud import _helpers - - time = datetime.time() - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_timestamp(time, client=client2) - - self.assertEqual( - 
api._subscription_seeked, - (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), None)) - - def test_get_iam_policy_w_bound_client(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = subscription.get_iam_policy() - - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._got_iam_policy, self.SUB_PATH) - - def test_get_iam_policy_w_alternate_client(self): - POLICY = { - 'etag': 'ACAB', - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - 
api._get_iam_policy_response = POLICY - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = subscription.get_iam_policy(client=client2) - - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.editors), []) - self.assertEqual(sorted(policy.viewers), []) - - self.assertEqual(api._got_iam_policy, self.SUB_PATH) - - def test_set_iam_policy_w_bound_client(self): - import operator - from google.cloud.pubsub.iam import Policy - from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - RESPONSE = POLICY.copy() - RESPONSE['etag'] = 'ABACABAF' - RESPONSE['version'] = 18 - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - policy = Policy('DEADBEEF', 17) - policy.owners = [OWNER1, OWNER2] - policy.editors = [EDITOR1, EDITOR2] - policy.viewers = [VIEWER1, VIEWER2] - policy.publishers = [PUBLISHER] - policy.subscribers = [SUBSCRIBER] - - new_policy = 
subscription.set_iam_policy(policy) - - self.assertEqual(new_policy.etag, 'ABACABAF') - self.assertEqual(new_policy.version, 18) - self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(len(api._set_iam_policy), 2) - self.assertEqual(api._set_iam_policy[0], self.SUB_PATH) - resource = api._set_iam_policy[1] - self.assertEqual(resource['etag'], POLICY['etag']) - self.assertEqual(resource['version'], POLICY['version']) - key = operator.itemgetter('role') - self.assertEqual( - sorted(resource['bindings'], key=key), - sorted(POLICY['bindings'], key=key)) - - def test_set_iam_policy_w_alternate_client(self): - from google.cloud.pubsub.iam import Policy - - RESPONSE = {'etag': 'ACAB'} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = Policy() - new_policy = subscription.set_iam_policy(policy, client=client2) - - self.assertEqual(new_policy.etag, 'ACAB') - self.assertIsNone(new_policy.version) - self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.editors), []) - self.assertEqual(sorted(new_policy.viewers), []) - self.assertEqual(api._set_iam_policy, (self.SUB_PATH, {})) - - def test_check_iam_permissions_w_bound_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - 
api._test_iam_permissions_response = ROLES[:-1] - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - allowed = subscription.check_iam_permissions(ROLES) - - self.assertEqual(allowed, ROLES[:-1]) - self.assertEqual(api._tested_iam_permissions, - (self.SUB_PATH, ROLES)) - - def test_check_iam_permissions_w_alternate_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = [] - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - allowed = subscription.check_iam_permissions(ROLES, client=client2) - - self.assertEqual(len(allowed), 0) - self.assertEqual(api._tested_iam_permissions, - (self.SUB_PATH, ROLES)) - - -class _FauxSubscribererAPI(object): - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - self._subscription_created = ( - subscription_path, topic_path, ack_deadline, push_endpoint, - retain_acked_messages, message_retention_duration) - return self._subscription_create_response - - def subscription_get(self, subscription_path): - from google.cloud.exceptions import NotFound - - self._subscription_got = subscription_path - try: - return self._subscription_get_response - except AttributeError: - raise NotFound(subscription_path) - - def subscription_delete(self, subscription_path): - self._subscription_deleted = subscription_path - return self._subscription_delete_response - - def subscription_modify_push_config( - self, subscription_path, push_endpoint): - self._subscription_modified_push_config = ( - 
subscription_path, push_endpoint) - return self._subscription_modify_push_config_response - - def subscription_pull(self, subscription_path, return_immediately, - max_messages): - self._subscription_pulled = ( - subscription_path, return_immediately, max_messages) - return self._subscription_pull_response - - def subscription_acknowledge(self, subscription_path, ack_ids): - self._subscription_acked = (subscription_path, ack_ids) - return self._subscription_acknowlege_response - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - self._subscription_modified_ack_deadline = ( - subscription_path, ack_ids, ack_deadline) - return self._subscription_modify_ack_deadline_response - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - self._subscription_seeked = ( - subscription_path, time, snapshot) - return self._subscription_seek_response - - -class TestAutoAck(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.subscription import AutoAck - - return AutoAck - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - subscription = _FauxSubscription(()) - auto_ack = self._make_one(subscription) - self.assertEqual(auto_ack._return_immediately, False) - self.assertEqual(auto_ack._max_messages, 1) - self.assertIsNone(auto_ack._client) - - def test_ctor_explicit(self): - CLIENT = object() - subscription = _FauxSubscription(()) - auto_ack = self._make_one( - subscription, return_immediately=True, max_messages=10, - client=CLIENT) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, True) - self.assertEqual(auto_ack._max_messages, 10) - self.assertIs(auto_ack._client, CLIENT) - - def test___enter___w_defaults(self): - subscription = _FauxSubscription(()) - auto_ack = self._make_one(subscription) - - with auto_ack as returned: - pass - - self.assertIs(returned, 
auto_ack) - self.assertEqual(subscription._return_immediately, False) - self.assertEqual(subscription._max_messages, 1) - self.assertIsNone(subscription._client) - - def test___enter___w_explicit(self): - CLIENT = object() - subscription = _FauxSubscription(()) - auto_ack = self._make_one( - subscription, return_immediately=True, max_messages=10, - client=CLIENT) - - with auto_ack as returned: - pass - - self.assertIs(returned, auto_ack) - self.assertEqual(subscription._return_immediately, True) - self.assertEqual(subscription._max_messages, 10) - self.assertIs(subscription._client, CLIENT) - - def test___exit___(self): - CLIENT = object() - ACK_ID1, MESSAGE1 = 'ACK_ID1', _FallibleMessage() - ACK_ID2, MESSAGE2 = 'ACK_ID2', _FallibleMessage() - ACK_ID3, MESSAGE3 = 'ACK_ID3', _FallibleMessage(True) - ITEMS = [ - (ACK_ID1, MESSAGE1), - (ACK_ID2, MESSAGE2), - (ACK_ID3, MESSAGE3), - ] - subscription = _FauxSubscription(ITEMS) - auto_ack = self._make_one(subscription, client=CLIENT) - with auto_ack: - for ack_id, message in list(auto_ack.items()): - if message.fail: - del auto_ack[ack_id] - self.assertEqual(sorted(subscription._acknowledged), - [ACK_ID1, ACK_ID2]) - self.assertIs(subscription._ack_client, CLIENT) - - def test_empty_ack_no_acknowledge(self): - subscription = mock.Mock(_FauxSubscription) - subscription.pull = lambda *args: [] - - auto_ack = self._make_one(subscription) - with auto_ack: - pass - - subscription.acknowledge.assert_not_called() - - -class _FauxIAMPolicy(object): - - def get_iam_policy(self, target_path): - self._got_iam_policy = target_path - return self._get_iam_policy_response - - def set_iam_policy(self, target_path, policy): - self._set_iam_policy = target_path, policy - return self._set_iam_policy_response - - def test_iam_permissions(self, target_path, permissions): - self._tested_iam_permissions = target_path, permissions - return self._test_iam_permissions_response - - -class _Topic(object): - - def __init__(self, name, client): - 
self.name = name - self._client = client - self.project = client.project - self.full_name = 'projects/%s/topics/%s' % (client.project, name) - self.path = '/projects/%s/topics/%s' % (client.project, name) - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - def topic(self, name, timestamp_messages=False): - from google.cloud.pubsub.topic import Topic - - return Topic(name, client=self, timestamp_messages=timestamp_messages) - - -class _FallibleMessage(object): - - def __init__(self, fail=False): - self.fail = fail - - -class _FauxSubscription(object): - - def __init__(self, items): - self._items = items - self._mapping = dict(items) - self._acknowledged = set() - - def pull(self, return_immediately=False, max_messages=1, client=None): - self._return_immediately = return_immediately - self._max_messages = max_messages - self._client = client - return self._items - - def acknowledge(self, ack_ids, client=None): - self._ack_client = client - for ack_id in ack_ids: - message = self._mapping[ack_id] - assert not message.fail - self._acknowledged.add(ack_id) diff --git a/pubsub/tests/unit/test_topic.py b/pubsub/tests/unit/test_topic.py deleted file mode 100644 index 2c90432195c2..000000000000 --- a/pubsub/tests/unit/test_topic.py +++ /dev/null @@ -1,974 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestTopic(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.topic import Topic - - return Topic - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_w_explicit_timestamp(self): - client = _Client(project=self.PROJECT) - topic = self._make_one(self.TOPIC_NAME, - client=client, - timestamp_messages=True) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertTrue(topic.timestamp_messages) - - def test_from_api_repr(self): - client = _Client(project=self.PROJECT) - resource = {'name': self.TOPIC_PATH} - klass = self._get_target_class() - topic = klass.from_api_repr(resource, client=client) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertIs(topic._client, client) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - - def test_from_api_repr_with_bad_client(self): - PROJECT1 = 'PROJECT1' - PROJECT2 = 'PROJECT2' - client = _Client(project=PROJECT1) - PATH = 'projects/%s/topics/%s' % (PROJECT2, self.TOPIC_NAME) - resource = {'name': PATH} - klass = self._get_target_class() - self.assertRaises(ValueError, klass.from_api_repr, - resource, client=client) - - def test_create_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client) - - topic.create() - - self.assertEqual(api._topic_created, self.TOPIC_PATH) - - def test_create_w_alternate_client(self): - client1 = 
_Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - topic.create(client=client2) - - self.assertEqual(api._topic_created, self.TOPIC_PATH) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - topic = self._make_one(self.TOPIC_NAME, client=client) - - self.assertFalse(topic.exists()) - - self.assertEqual(api._topic_got, self.TOPIC_PATH) - - def test_exists_hit_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_get_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - self.assertTrue(topic.exists(client=client2)) - - self.assertEqual(api._topic_got, self.TOPIC_PATH) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_delete_response = {} - topic = self._make_one(self.TOPIC_NAME, client=client) - - topic.delete() - - self.assertEqual(api._topic_deleted, self.TOPIC_PATH) - - def test_delete_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_delete_response = {} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - topic.delete(client=client2) - - self.assertEqual(api._topic_deleted, self.TOPIC_PATH) - - def test_publish_single_bytes_wo_attrs_w_bound_client(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic 
= self._make_one(self.TOPIC_NAME, client=client) - - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): - import datetime - from google.cloud._helpers import _RFC3339_MICROS - - NOW = datetime.datetime.utcnow() - - def _utcnow(): - return NOW - - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = { - 'data': PAYLOAD, - 'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)}, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - - topic = self._make_one(self.TOPIC_NAME, client=client1, - timestamp_messages=True) - with mock.patch('google.cloud.pubsub.topic._NOW', new=_utcnow): - msgid = topic.publish(PAYLOAD, client=client2) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - OVERRIDE = '2015-04-10T16:46:22.868399Z' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'timestamp': OVERRIDE}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client, - timestamp_messages=True) - - msgid = topic.publish(PAYLOAD, timestamp=OVERRIDE) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_w_attrs(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - 
topic = self._make_one(self.TOPIC_NAME, client=client) - - msgid = topic.publish(PAYLOAD, attr1='value1', attr2='value2') - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_with_gax(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_without_gax(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_multiple_w_bound_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = self._make_one(self.TOPIC_NAME, client=client) - - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) 
- - def test_publish_w_no_messages(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [] - topic = self._make_one(self.TOPIC_NAME, client=client) - - with topic.batch() as batch: - pass - - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._api_called, 0) - - def test_publish_multiple_w_alternate_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = { - 'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = self._make_one(self.TOPIC_NAME, client=client1) - - with topic.batch(client=client2) as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) - - def test_publish_multiple_error(self): - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - topic = self._make_one(self.TOPIC_NAME, client=client) - - try: - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - raise _Bugout() - except _Bugout: - pass - - self.assertEqual(list(batch), []) - self.assertEqual(getattr(api, '_topic_published', self), self) - - def test_subscription(self): - from google.cloud.pubsub.subscription import Subscription - - client = _Client(project=self.PROJECT) - topic = self._make_one(self.TOPIC_NAME, client=client) - - 
SUBSCRIPTION_NAME = 'subscription_name' - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertIs(subscription.topic, topic) - - def test_list_subscriptions_no_paging(self): - import six - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - - SUB_NAME_1 = 'subscription_1' - SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_1) - SUB_NAME_2 = 'subscription_2' - SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_2) - SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] - TOKEN = 'TOKEN' - - returned = { - 'subscriptions': SUBS_LIST, - 'nextPageToken': TOKEN, - } - client._connection = _Connection(returned) - - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions() - page = six.next(iterator.pages) - subscriptions = list(page) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 2) - - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[0].name, SUB_NAME_1) - self.assertIs(subscription.topic, topic) - - subscription = subscriptions[1] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[1].name, SUB_NAME_2) - self.assertIs(subscription.topic, topic) - - self.assertEqual(next_page_token, TOKEN) - # Verify the mock. 
- called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], {}) - - def test_list_subscriptions_with_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - - SUB_NAME_1 = 'subscription_1' - SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_1) - SUB_NAME_2 = 'subscription_2' - SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_2) - SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] - PAGE_SIZE = 10 - TOKEN = 'TOKEN' - - returned = { - 'subscriptions': SUBS_LIST, - } - client._connection = _Connection(returned) - - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions( - page_size=PAGE_SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 2) - - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[0].name, SUB_NAME_1) - self.assertIs(subscription.topic, topic) - - subscription = subscriptions[1] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[1].name, SUB_NAME_2) - self.assertIs(subscription.topic, topic) - - self.assertIsNone(next_page_token) - # Verify the mock. 
- called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], - {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) - - def test_list_subscriptions_missing_key(self): - from google.cloud.pubsub.client import Client - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - client._connection = _Connection({}) - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions() - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_page_token) - # Verify the mock. - called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], {}) - - def test_get_iam_policy_w_bound_client(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - 
{'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = self._make_one(self.TOPIC_NAME, client=client) - - policy = topic.get_iam_policy() - - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) - - def test_get_iam_policy_w_alternate_client(self): - POLICY = { - 'etag': 'ACAB', - } - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = self._make_one(self.TOPIC_NAME, client=client1) - - policy = topic.get_iam_policy(client=client2) - - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.editors), []) - self.assertEqual(sorted(policy.viewers), []) - - self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) - - def test_set_iam_policy_w_bound_client(self): - import operator - from google.cloud.pubsub.iam import Policy - from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - 
POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, - 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, - 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, - 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, - 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, - 'members': [SUBSCRIBER]}, - ], - } - RESPONSE = POLICY.copy() - RESPONSE['etag'] = 'ABACABAF' - RESPONSE['version'] = 18 - - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = self._make_one(self.TOPIC_NAME, client=client) - policy = Policy('DEADBEEF', 17) - policy.owners = [OWNER1, OWNER2] - policy.editors = [EDITOR1, EDITOR2] - policy.viewers = [VIEWER1, VIEWER2] - policy.publishers = [PUBLISHER] - policy.subscribers = [SUBSCRIBER] - - new_policy = topic.set_iam_policy(policy) - - self.assertEqual(new_policy.etag, 'ABACABAF') - self.assertEqual(new_policy.version, 18) - self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(len(api._set_iam_policy), 2) - self.assertEqual(api._set_iam_policy[0], self.TOPIC_PATH) - resource = api._set_iam_policy[1] - self.assertEqual(resource['etag'], POLICY['etag']) - self.assertEqual(resource['version'], POLICY['version']) - key = operator.itemgetter('role') - self.assertEqual( - sorted(resource['bindings'], key=key), - sorted(POLICY['bindings'], key=key)) - - def test_set_iam_policy_w_alternate_client(self): - from google.cloud.pubsub.iam import Policy - - RESPONSE = {'etag': 'ACAB'} - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - 
api._set_iam_policy_response = RESPONSE - topic = self._make_one(self.TOPIC_NAME, client=client1) - - policy = Policy() - new_policy = topic.set_iam_policy(policy, client=client2) - - self.assertEqual(new_policy.etag, 'ACAB') - self.assertIsNone(new_policy.version) - self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.editors), []) - self.assertEqual(sorted(new_policy.viewers), []) - - self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, {})) - - def test_check_iam_permissions_w_bound_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = ROLES[:-1] - topic = self._make_one(self.TOPIC_NAME, client=client) - - allowed = topic.check_iam_permissions(ROLES) - - self.assertEqual(allowed, ROLES[:-1]) - self.assertEqual(api._tested_iam_permissions, - (self.TOPIC_PATH, ROLES)) - - def test_check_iam_permissions_w_alternate_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = [] - topic = self._make_one(self.TOPIC_NAME, client=client1) - - allowed = topic.check_iam_permissions(ROLES, client=client2) - - self.assertEqual(len(allowed), 0) - self.assertEqual(api._tested_iam_permissions, - (self.TOPIC_PATH, ROLES)) - - -class TestBatch(unittest.TestCase): - PROJECT = 'PROJECT' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.topic import Batch - - return Batch - - def _make_one(self, *args, **kwargs): - return 
self._get_target_class()(*args, **kwargs) - - def test_ctor_defaults(self): - topic = _Topic() - client = _Client(project=self.PROJECT) - batch = self._make_one(topic, client) - self.assertIs(batch.topic, topic) - self.assertIs(batch.client, client) - self.assertEqual(len(batch.messages), 0) - self.assertEqual(len(batch.message_ids), 0) - - def test___iter___empty(self): - topic = _Topic() - client = object() - batch = self._make_one(topic, client) - self.assertEqual(list(batch), []) - - def test___iter___non_empty(self): - topic = _Topic() - client = object() - batch = self._make_one(topic, client) - batch.message_ids[:] = ['ONE', 'TWO', 'THREE'] - self.assertEqual(list(batch), ['ONE', 'TWO', 'THREE']) - - def test_publish_bytes_wo_attrs(self): - PAYLOAD = 'This is the message text' - MESSAGE = {'data': PAYLOAD, - 'attributes': {}} - client = _Client(project=self.PROJECT) - topic = _Topic() - batch = self._make_one(topic, client=client) - batch.publish(PAYLOAD) - self.assertEqual(batch.messages, [MESSAGE]) - - def test_publish_bytes_w_add_timestamp(self): - PAYLOAD = 'This is the message text' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'timestamp': 'TIMESTAMP'}} - client = _Client(project=self.PROJECT) - topic = _Topic(timestamp_messages=True) - batch = self._make_one(topic, client=client) - batch.publish(PAYLOAD) - self.assertEqual(batch.messages, [MESSAGE]) - - def test_commit_w_bound_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, - 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client) - - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - 
batch.commit() - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_commit_w_alternate_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client1 = _Client(project='PROJECT') - client2 = _Client(project='PROJECT') - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client1) - - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - batch.commit(client=client2) - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_context_mgr_success(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client) - - with batch as other: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertIs(other, batch) - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_context_mgr_failure(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 
'This is the second message text' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - topic = _Topic() - batch = self._make_one(topic, client=client) - - try: - with batch as other: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - raise _Bugout() - except _Bugout: - pass - - self.assertIs(other, batch) - self.assertEqual(list(batch), []) - self.assertEqual(list(batch.messages), [MESSAGE1, MESSAGE2]) - self.assertEqual(getattr(api, '_topic_published', self), self) - - def test_batch_messages(self): - # Establish that a batch actually batches messsages in the expected - # way. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client) as batch: - self.assertIsInstance(batch, Batch) - - # Publish four messages and establish that the batch does - # not commit. - for i in range(0, 4): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Check the contents of the batch. - self.assertEqual(batch.messages, [ - {'data': 'Batch message 0.', 'attributes': {}}, - {'data': 'Batch message 1.', 'attributes': {}}, - {'data': 'Batch message 2.', 'attributes': {}}, - {'data': 'Batch message 3.', 'attributes': {}}, - ]) - - def test_message_count_autocommit(self): - # Establish that if the batch is assigned to take a maximum - # number of messages, that it commits when it reaches that maximum. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. 
- Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client, max_messages=5) as batch: - self.assertIsInstance(batch, Batch) - - # Publish four messages and establish that the batch does - # not commit. - for i in range(0, 4): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Publish a fifth message and observe the commit. - batch.publish('The final call to trigger a commit!') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - @mock.patch('time.time') - def test_message_time_autocommit(self, mock_time): - # Establish that if the batch is sufficiently old, that it commits - # the next time it receives a publish. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - mock_time.return_value = 0.0 - with self._make_one(topic, client=client, max_interval=5) as batch: - self.assertIsInstance(batch, Batch) - - # Publish some messages and establish that the batch does - # not commit. - for i in range(0, 10): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Move time ahead so that this batch is too old. - mock_time.return_value = 10.0 - - # Publish another message and observe the commit. - batch.publish('The final call to trigger a commit!') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - def test_message_size_autocommit(self): - # Establish that if the batch is sufficiently large, that it - # auto-commits. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. 
- Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client, max_size=100) as batch: - self.assertIsInstance(batch, Batch) - - # Publish a short (< 100 bytes) message and establish that - # the batch does not commit. - batch.publish(b'foo') - commit.assert_not_called() - - # Publish another message and observe the commit. - batch.publish(u'The final call to trigger a commit, because ' - u'this message is sufficiently long.') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - -class _FauxPublisherAPI(object): - _api_called = 0 - - def topic_create(self, topic_path): - self._topic_created = topic_path - return self._topic_create_response - - def topic_get(self, topic_path): - from google.cloud.exceptions import NotFound - - self._topic_got = topic_path - try: - return self._topic_get_response - except AttributeError: - raise NotFound(topic_path) - - def topic_delete(self, topic_path): - self._topic_deleted = topic_path - return self._topic_delete_response - - def topic_publish(self, topic_path, messages): - self._topic_published = topic_path, messages - self._api_called += 1 - return self._topic_publish_response - - -class _FauxIAMPolicy(object): - - def get_iam_policy(self, target_path): - self._got_iam_policy = target_path - return self._get_iam_policy_response - - def set_iam_policy(self, target_path, policy): - self._set_iam_policy = target_path, policy - return self._set_iam_policy_response - - def test_iam_permissions(self, target_path, permissions): - self._tested_iam_permissions = target_path, permissions - return self._test_iam_permissions_response - - -class _Topic(object): - - def __init__(self, name="NAME", project="PROJECT", - timestamp_messages=False): - self.full_name = 'projects/%s/topics/%s' % (project, name) - self.path = '/%s' % (self.full_name,) - self.timestamp_messages = 
timestamp_messages - - def _timestamp_message(self, attrs): - if self.timestamp_messages: - attrs['timestamp'] = 'TIMESTAMP' - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - -class _Bugout(Exception): - pass - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response