From 2d26f4f4652e60a1008f6528c915cbbf68c19339 Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Wed, 26 Apr 2017 13:31:37 -0400 Subject: [PATCH 01/62] implement retries for read_rows --- bigtable/google/cloud/bigtable/retry.py | 178 +++++++++++++++++++++ bigtable/google/cloud/bigtable/row_data.py | 3 + bigtable/google/cloud/bigtable/table.py | 97 ++++------- bigtable/tests/retry_test_script.txt | 38 +++++ bigtable/tests/system.py | 70 ++++++++ bigtable/tests/unit/test_table.py | 14 +- 6 files changed, 325 insertions(+), 75 deletions(-) create mode 100644 bigtable/google/cloud/bigtable/retry.py create mode 100644 bigtable/tests/retry_test_script.txt diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py new file mode 100644 index 000000000000..845ac130c83e --- /dev/null +++ b/bigtable/google/cloud/bigtable/retry.py @@ -0,0 +1,178 @@ +"""Provides function wrappers that implement retrying.""" +import random +import time +import six + +from google.cloud._helpers import _to_bytes +from google.cloud.bigtable._generated import ( + bigtable_pb2 as data_messages_v2_pb2) +from google.gax import config, errors +from grpc import RpcError + +_MILLIS_PER_SECOND = 1000 + +def _has_timeout_settings(backoff_settings): + return (backoff_settings.rpc_timeout_multiplier is not None and + backoff_settings.max_rpc_timeout_millis is not None and + backoff_settings.total_timeout_millis is not None and + backoff_settings.initial_rpc_timeout_millis is not None) + +class ReadRowsIterator(): + """Creates an iterator equivalent to a_iter, but that retries on certain + exceptions. + """ + + def __init__(self, client, name, start_key, end_key, filter_, limit, + retry_options, **kwargs): + self.client = client + self.retry_options = retry_options + self.name = name + self.start_key = start_key + self.start_key_closed = True + self.end_key = end_key + self.filter_ = filter_ + self.limit = limit + + self.delay_mult = retry_options.backoff_settings.retry_delay_multiplier + self.max_delay_millis = retry_options.backoff_settings.max_retry_delay_millis + self.has_timeout_settings = _has_timeout_settings(retry_options.backoff_settings) + + if self.has_timeout_settings: + self.timeout_mult = retry_options.backoff_settings.rpc_timeout_multiplier + self.max_timeout = (retry_options.backoff_settings.max_rpc_timeout_millis / _MILLIS_PER_SECOND) + self.total_timeout = (retry_options.backoff_settings.total_timeout_millis / _MILLIS_PER_SECOND) + self.set_stream() + + def set_start_key(self, start_key): + """ + Sets the row key at which this iterator will begin reading. + """ + self.start_key = start_key + self.start_key_closed = False + + def set_stream(self): + """ + Resets the read stream by making an RPC on the 'ReadRows' endpoint. + """ + request_pb = _create_row_request( + self.name, start_key=self.start_key, + start_key_closed=self.start_key_closed, end_key=self.end_key, + filter_= self.filter_, limit=self.limit) + self.stream = self.client._data_stub.ReadRows(request_pb) + + def next(self, *args, **kwargs): + """ + Read and return the next row from the stream. Retry on idempotent failure. 
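+        On a retryable error the stream is re-opened at the row key most
+        recently seen by the consumer (via set_start_key), so already
+        consumed rows are not re-read after resumption.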
+ """ + delay = self.retry_options.backoff_settings.initial_retry_delay_millis + exc = errors.RetryError('Retry total timeout exceeded before any' + 'response was received') + if self.has_timeout_settings: + timeout = ( + self.retry_options.backoff_settings.initial_rpc_timeout_millis / + _MILLIS_PER_SECOND) + + now = time.time() + deadline = now + self.total_timeout + else: + timeout = None + deadline = None + + while deadline is None or now < deadline: + try: + return six.next(self.stream) + except StopIteration as stop: + raise stop + except RpcError as error: # pylint: disable=broad-except + code = config.exc_to_code(error) + if code not in self.retry_options.retry_codes: + six.reraise(errors.RetryError, + errors.RetryError(str(error))) + + # pylint: disable=redefined-variable-type + exc = errors.RetryError( + 'Retry total timeout exceeded with exception', error) + + # Sleep a random number which will, on average, equal the + # expected delay. + to_sleep = random.uniform(0, delay * 2) + time.sleep(to_sleep / _MILLIS_PER_SECOND) + delay = min(delay * self.delay_mult, self.max_delay_millis) + + if self.has_timeout_settings: + now = time.time() + timeout = min( + timeout * self.timeout_mult, self.max_timeout, deadline - now) + self.set_stream() + + six.reraise(errors.RetryError, exc) + + def __next__(self, *args, **kwargs): + return self.next(*args, **kwargs) + + def __iter__(self): + return self + +def _create_row_request(table_name, row_key=None, start_key=None, + start_key_closed=True, end_key=None, filter_=None, + limit=None): + """Creates a request to read rows in a table. + + :type table_name: str + :param table_name: The name of the table to read from. + + :type row_key: bytes + :param row_key: (Optional) The key of a specific row to read from. + + :type start_key: bytes + :param start_key: (Optional) The beginning of a range of row keys to + read from. The range will include ``start_key``. If + left empty, will be interpreted as the empty string. + + :type end_key: bytes + :param end_key: (Optional) The end of a range of row keys to read from. + The range will not include ``end_key``. If left empty, + will be interpreted as an infinite string. + + :type filter_: :class:`.RowFilter` + :param filter_: (Optional) The filter to apply to the contents of the + specified row(s). If unset, reads the entire table. + + :type limit: int + :param limit: (Optional) The read will terminate after committing to N + rows' worth of results. The default (zero) is to return + all results. + + :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest` + :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs. 
+ :raises: :class:`ValueError ` if both + ``row_key`` and one of ``start_key`` and ``end_key`` are set + """ + request_kwargs = {'table_name': table_name} + if (row_key is not None and + (start_key is not None or end_key is not None)): + raise ValueError('Row key and row range cannot be ' + 'set simultaneously') + range_kwargs = {} + if start_key is not None or end_key is not None: + if start_key is not None: + if start_key_closed: + range_kwargs['start_key_closed'] = _to_bytes(start_key) + else: + range_kwargs['start_key_open'] = _to_bytes(start_key) + if end_key is not None: + range_kwargs['end_key_open'] = _to_bytes(end_key) + if filter_ is not None: + request_kwargs['filter'] = filter_.to_pb() + if limit is not None: + request_kwargs['rows_limit'] = limit + + message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs) + + if row_key is not None: + message.rows.row_keys.append(_to_bytes(row_key)) + + if range_kwargs: + message.rows.row_ranges.add(**range_kwargs) + + return message \ No newline at end of file diff --git a/bigtable/google/cloud/bigtable/row_data.py b/bigtable/google/cloud/bigtable/row_data.py index 60fc1f0ef1e8..d3c70d431e29 100644 --- a/bigtable/google/cloud/bigtable/row_data.py +++ b/bigtable/google/cloud/bigtable/row_data.py @@ -274,6 +274,9 @@ def consume_next(self): self._validate_chunk(chunk) + if ("ReadRowsIterator" in self._response_iterator.__class__.__name__): + self._response_iterator.set_start_key(chunk.row_key) + if chunk.reset_row: row = self._row = None cell = self._cell = self._previous_cell = None diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 3fbd198d6b65..8177f1e1e1d9 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -14,6 +14,7 @@ """User friendly container for Google Cloud Bigtable Table.""" +from __future__ import absolute_import from google.cloud._helpers import _to_bytes from google.cloud.bigtable._generated import ( bigtable_pb2 as data_messages_v2_pb2) @@ -27,7 +28,31 @@ from google.cloud.bigtable.row import ConditionalRow from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.row_data import PartialRowsData - +from google.gax import RetryOptions, BackoffSettings +from google.cloud.bigtable.retry import ReadRowsIterator, _create_row_request +from grpc import StatusCode + +import six + +BACKOFF_SETTINGS = BackoffSettings( + initial_retry_delay_millis = 10, + retry_delay_multiplier = 1.3, + max_retry_delay_millis = 30000, + initial_rpc_timeout_millis = 25 * 60 * 1000, + rpc_timeout_multiplier = 1.0, + max_rpc_timeout_millis = 25 * 60 * 1000, + total_timeout_millis = 30 * 60 * 1000 +) + +RETRY_OPTIONS = RetryOptions( + retry_codes = [ + StatusCode.DEADLINE_EXCEEDED, + StatusCode.ABORTED, + StatusCode.INTERNAL, + StatusCode.UNAVAILABLE + ], + backoff_settings = BACKOFF_SETTINGS +) class Table(object): """Representation of a Google Cloud Bigtable Table. @@ -268,13 +293,10 @@ def read_rows(self, start_key=None, end_key=None, limit=None, :returns: A :class:`.PartialRowsData` convenience wrapper for consuming the streamed results. 
""" - request_pb = _create_row_request( - self.name, start_key=start_key, end_key=end_key, filter_=filter_, - limit=limit) client = self._instance._client - response_iterator = client._data_stub.ReadRows(request_pb) - # We expect an iterator of `data_messages_v2_pb2.ReadRowsResponse` - return PartialRowsData(response_iterator) + retrying_iterator = ReadRowsIterator(client, self.name, start_key, + end_key, filter_, limit, RETRY_OPTIONS) + return PartialRowsData(retrying_iterator) def sample_row_keys(self): """Read a sample of row keys in the table. @@ -312,64 +334,3 @@ def sample_row_keys(self): client = self._instance._client response_iterator = client._data_stub.SampleRowKeys(request_pb) return response_iterator - - -def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, - filter_=None, limit=None): - """Creates a request to read rows in a table. - - :type table_name: str - :param table_name: The name of the table to read from. - - :type row_key: bytes - :param row_key: (Optional) The key of a specific row to read from. - - :type start_key: bytes - :param start_key: (Optional) The beginning of a range of row keys to - read from. The range will include ``start_key``. If - left empty, will be interpreted as the empty string. - - :type end_key: bytes - :param end_key: (Optional) The end of a range of row keys to read from. - The range will not include ``end_key``. If left empty, - will be interpreted as an infinite string. - - :type filter_: :class:`.RowFilter` - :param filter_: (Optional) The filter to apply to the contents of the - specified row(s). If unset, reads the entire table. - - :type limit: int - :param limit: (Optional) The read will terminate after committing to N - rows' worth of results. The default (zero) is to return - all results. - - :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest` - :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs. - :raises: :class:`ValueError ` if both - ``row_key`` and one of ``start_key`` and ``end_key`` are set - """ - request_kwargs = {'table_name': table_name} - if (row_key is not None and - (start_key is not None or end_key is not None)): - raise ValueError('Row key and row range cannot be ' - 'set simultaneously') - range_kwargs = {} - if start_key is not None or end_key is not None: - if start_key is not None: - range_kwargs['start_key_closed'] = _to_bytes(start_key) - if end_key is not None: - range_kwargs['end_key_open'] = _to_bytes(end_key) - if filter_ is not None: - request_kwargs['filter'] = filter_.to_pb() - if limit is not None: - request_kwargs['rows_limit'] = limit - - message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs) - - if row_key is not None: - message.rows.row_keys.append(_to_bytes(row_key)) - - if range_kwargs: - message.rows.row_ranges.add(**range_kwargs) - - return message diff --git a/bigtable/tests/retry_test_script.txt b/bigtable/tests/retry_test_script.txt new file mode 100644 index 000000000000..863662e897ba --- /dev/null +++ b/bigtable/tests/retry_test_script.txt @@ -0,0 +1,38 @@ +# This retry script is processed by the retry server and the client under test. +# Client tests should parse any command beginning with "CLIENT:", send the corresponding RPC +# to the retry server and expect a valid response. +# "EXPECT" commands indicate the call the server is expecting the client to send. +# +# The retry server has one table named "table" that should be used for testing. 
+# There are three types of commands supported: +# READ +# Expect the corresponding rows to be returned with arbitrary values. +# SCAN ... +# Ranges are expressed as an interval with either open or closed start and end, +# such as [1,3) for "1,2" or (1, 3] for "2,3". +# WRITE +# All writes should succeed eventually. Value payload is ignored. +# The server writes PASS or FAIL on a line by itself to STDOUT depending on the result of the test. +# All other server output should be ignored. + +# Echo same scan back after immediate error +CLIENT: SCAN [r1,r3) r1,r2 +EXPECT: SCAN [r1,r3) +SERVER: ERROR Unavailable +EXPECT: SCAN [r1,r3) +SERVER: READ_RESPONSE r1,r2 + +# Retry scans with open interval starting at the least read row key. +# Instead of using open intervals for retry ranges, '\x00' can be +# appended to the last received row key and sent in a closed interval. +CLIENT: SCAN [r1,r9) r1,r2,r3,r4,r5,r6,r7,r8 +EXPECT: SCAN [r1,r9) +SERVER: READ_RESPONSE r1,r2,r3,r4 +SERVER: ERROR Unavailable +EXPECT: SCAN (r4,r9) +SERVER: ERROR Unavailable +EXPECT: SCAN (r4,r9) +SERVER: READ_RESPONSE r5,r6,r7 +SERVER: ERROR Unavailable +EXPECT: SCAN (r7,r9) +SERVER: READ_RESPONSE r8 diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index faed85fdb302..3772aca721a1 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -295,6 +295,76 @@ def test_delete_column_family(self): # Make sure we have successfully deleted it. self.assertEqual(temp_table.list_column_families(), {}) + def test_retry(self): + import subprocess, os, stat, platform + from google.cloud.bigtable.client import Client + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable.table import Table + + # import for urlopen based on version + try: + # python 3 + from urllib.request import urlopen + except ImportError: + # python 2 + from urllib2 import urlopen + + + TEST_SCRIPT = 'tests/retry_test_script.txt' + SERVER_NAME = 'retry_server' + SERVER_ZIP = SERVER_NAME + ".tar.gz" + + def process_scan(table, range, ids): + range_chunks = range.split(",") + range_open = range_chunks[0].lstrip("[") + range_close = range_chunks[1].rstrip(")") + rows = table.read_rows(range_open, range_close) + rows.consume_all() + + # Download server + MOCK_SERVER_URLS = { + 'Linux': 'https://storage.googleapis.com/cloud-bigtable-test/retries/retry_server_linux.tar.gz', + 'Darwin': 'https://storage.googleapis.com/cloud-bigtable-test/retries/retry_server_mac.tar.gz', + } + + test_platform = platform.system() + if (test_platform not in MOCK_SERVER_URLS): + self.fail("Retry server not available for platform " + test_platform) + + mock_server_download = urlopen(MOCK_SERVER_URLS[test_platform]).read() + mock_server_file = open(SERVER_ZIP, 'wb') + mock_server_file.write(mock_server_download) + + # Unzip server + subprocess.call(['tar', 'zxvf', SERVER_ZIP, '-C', '.']) + + # Connect to server + server = subprocess.Popen( + ['./' + SERVER_NAME, '--script=' + TEST_SCRIPT], + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + ) + + (endpoint, port) = server.stdout.readline().rstrip("\n").split(":") + os.environ["BIGTABLE_EMULATOR_HOST"] = endpoint + ":" + port + client = Client(project="client", admin=True) + instance = Instance("instance", client) + table = instance.table("table") + + # Run test, line by line + script = open(TEST_SCRIPT, 'r') + for line in script.readlines(): + if line.startswith("CLIENT:"): + chunks = line.split(" ") + op = chunks[1] + if (op != "SCAN"): + self.fail("Script contained " + op + " operation. 
Only \'SCAN\' is supported.") + else: + process_scan(table, chunks[2], chunks[3]) + + # Clean up + server.kill() + os.remove(SERVER_ZIP) + os.remove(SERVER_NAME) class TestDataAPI(unittest.TestCase): diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 63844f5d48b7..51adcef6ed17 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -352,7 +352,8 @@ def test_read_rows(self): from google.cloud._testing import _Monkey from tests.unit._testing import _FakeStub from google.cloud.bigtable.row_data import PartialRowsData - from google.cloud.bigtable import table as MUT + from google.cloud.bigtable import retry as MUT + from google.cloud.bigtable.retry import ReadRowsIterator client = _Client() instance = _Instance(self.INSTANCE_NAME, client=client) @@ -372,20 +373,18 @@ def mock_create_row_request(table_name, **kwargs): # Patch the stub used by the API method. client._data_stub = stub = _FakeStub(response_iterator) - # Create expected_result. - expected_result = PartialRowsData(response_iterator) - - # Perform the method and check the result. start_key = b'start-key' end_key = b'end-key' filter_obj = object() limit = 22 - with _Monkey(MUT, _create_row_request=mock_create_row_request): + with _Monkey(MUT, _create_row_request=mock_create_row_request): + # Perform the method and check the result. result = table.read_rows( start_key=start_key, end_key=end_key, filter_=filter_obj, limit=limit) - self.assertEqual(result, expected_result) + self.assertIsInstance(result._response_iterator, ReadRowsIterator) + self.assertEqual(result._response_iterator.client, client) self.assertEqual(stub.method_calls, [( 'ReadRows', (request_pb,), @@ -396,6 +395,7 @@ def mock_create_row_request(table_name, **kwargs): 'end_key': end_key, 'filter_': filter_obj, 'limit': limit, + 'start_key_closed': True, } self.assertEqual(mock_created, [(table.name, created_kwargs)]) From d25cafa21a379d9f5aef48b278c587a3e1404577 Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Thu, 27 Apr 2017 10:50:10 -0400 Subject: [PATCH 02/62] fix linter errors --- bigtable/google/cloud/bigtable/retry.py | 51 ++++++++++++++-------- bigtable/google/cloud/bigtable/row_data.py | 3 +- bigtable/google/cloud/bigtable/table.py | 27 ++++++------ 3 files changed, 47 insertions(+), 34 deletions(-) diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py index 845ac130c83e..98c876797b08 100644 --- a/bigtable/google/cloud/bigtable/retry.py +++ b/bigtable/google/cloud/bigtable/retry.py @@ -9,21 +9,24 @@ from google.gax import config, errors from grpc import RpcError + _MILLIS_PER_SECOND = 1000 + def _has_timeout_settings(backoff_settings): return (backoff_settings.rpc_timeout_multiplier is not None and backoff_settings.max_rpc_timeout_millis is not None and backoff_settings.total_timeout_millis is not None and backoff_settings.initial_rpc_timeout_millis is not None) + class ReadRowsIterator(): """Creates an iterator equivalent to a_iter, but that retries on certain exceptions. 
""" def __init__(self, client, name, start_key, end_key, filter_, limit, - retry_options, **kwargs): + retry_options, **kwargs): self.client = client self.retry_options = retry_options self.name = name @@ -34,13 +37,20 @@ def __init__(self, client, name, start_key, end_key, filter_, limit, self.limit = limit self.delay_mult = retry_options.backoff_settings.retry_delay_multiplier - self.max_delay_millis = retry_options.backoff_settings.max_retry_delay_millis - self.has_timeout_settings = _has_timeout_settings(retry_options.backoff_settings) + self.max_delay_millis = \ + retry_options.backoff_settings.max_retry_delay_millis + self.has_timeout_settings = \ + _has_timeout_settings(retry_options.backoff_settings) if self.has_timeout_settings: - self.timeout_mult = retry_options.backoff_settings.rpc_timeout_multiplier - self.max_timeout = (retry_options.backoff_settings.max_rpc_timeout_millis / _MILLIS_PER_SECOND) - self.total_timeout = (retry_options.backoff_settings.total_timeout_millis / _MILLIS_PER_SECOND) + self.timeout_mult = \ + retry_options.backoff_settings.rpc_timeout_multiplier + self.max_timeout = \ + (retry_options.backoff_settings.max_rpc_timeout_millis / + _MILLIS_PER_SECOND) + self.total_timeout = \ + (retry_options.backoff_settings.total_timeout_millis / + _MILLIS_PER_SECOND) self.set_stream() def set_start_key(self, start_key): @@ -54,23 +64,24 @@ def set_stream(self): """ Resets the read stream by making an RPC on the 'ReadRows' endpoint. """ - request_pb = _create_row_request( - self.name, start_key=self.start_key, - start_key_closed=self.start_key_closed, end_key=self.end_key, - filter_= self.filter_, limit=self.limit) - self.stream = self.client._data_stub.ReadRows(request_pb) + req_pb = _create_row_request(self.name, start_key=self.start_key, + start_key_closed=self.start_key_closed, + end_key=self.end_key, + filter_=self.filter_, limit=self.limit) + self.stream = self.client._data_stub.ReadRows(req_pb) def next(self, *args, **kwargs): """ - Read and return the next row from the stream. Retry on idempotent failure. + Read and return the next row from the stream. + Retry on idempotent failure. 
""" delay = self.retry_options.backoff_settings.initial_retry_delay_millis exc = errors.RetryError('Retry total timeout exceeded before any' 'response was received') if self.has_timeout_settings: - timeout = ( - self.retry_options.backoff_settings.initial_rpc_timeout_millis / - _MILLIS_PER_SECOND) + timeout = (self.retry_options.backoff_settings + .initial_rpc_timeout_millis / + _MILLIS_PER_SECOND) now = time.time() deadline = now + self.total_timeout @@ -86,8 +97,8 @@ def next(self, *args, **kwargs): except RpcError as error: # pylint: disable=broad-except code = config.exc_to_code(error) if code not in self.retry_options.retry_codes: - six.reraise(errors.RetryError, - errors.RetryError(str(error))) + six.reraise(errors.RetryError, + errors.RetryError(str(error))) # pylint: disable=redefined-variable-type exc = errors.RetryError( @@ -102,7 +113,8 @@ def next(self, *args, **kwargs): if self.has_timeout_settings: now = time.time() timeout = min( - timeout * self.timeout_mult, self.max_timeout, deadline - now) + timeout * self.timeout_mult, self.max_timeout, + deadline - now) self.set_stream() six.reraise(errors.RetryError, exc) @@ -113,6 +125,7 @@ def __next__(self, *args, **kwargs): def __iter__(self): return self + def _create_row_request(table_name, row_key=None, start_key=None, start_key_closed=True, end_key=None, filter_=None, limit=None): @@ -175,4 +188,4 @@ def _create_row_request(table_name, row_key=None, start_key=None, if range_kwargs: message.rows.row_ranges.add(**range_kwargs) - return message \ No newline at end of file + return message diff --git a/bigtable/google/cloud/bigtable/row_data.py b/bigtable/google/cloud/bigtable/row_data.py index d3c70d431e29..e1b1acd8c643 100644 --- a/bigtable/google/cloud/bigtable/row_data.py +++ b/bigtable/google/cloud/bigtable/row_data.py @@ -274,7 +274,8 @@ def consume_next(self): self._validate_chunk(chunk) - if ("ReadRowsIterator" in self._response_iterator.__class__.__name__): + if ("ReadRowsIterator" in + self._response_iterator.__class__.__name__): self._response_iterator.set_start_key(chunk.row_key) if chunk.reset_row: diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 8177f1e1e1d9..f196adf6bc80 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -15,7 +15,6 @@ """User friendly container for Google Cloud Bigtable Table.""" from __future__ import absolute_import -from google.cloud._helpers import _to_bytes from google.cloud.bigtable._generated import ( bigtable_pb2 as data_messages_v2_pb2) from google.cloud.bigtable._generated import ( @@ -32,28 +31,27 @@ from google.cloud.bigtable.retry import ReadRowsIterator, _create_row_request from grpc import StatusCode -import six - BACKOFF_SETTINGS = BackoffSettings( - initial_retry_delay_millis = 10, - retry_delay_multiplier = 1.3, - max_retry_delay_millis = 30000, - initial_rpc_timeout_millis = 25 * 60 * 1000, - rpc_timeout_multiplier = 1.0, - max_rpc_timeout_millis = 25 * 60 * 1000, - total_timeout_millis = 30 * 60 * 1000 + initial_retry_delay_millis=10, + retry_delay_multiplier=1.3, + max_retry_delay_millis=30000, + initial_rpc_timeout_millis=25 * 60 * 1000, + rpc_timeout_multiplier=1.0, + max_rpc_timeout_millis=25 * 60 * 1000, + total_timeout_millis=30 * 60 * 1000 ) - + RETRY_OPTIONS = RetryOptions( - retry_codes = [ + retry_codes=[ StatusCode.DEADLINE_EXCEEDED, StatusCode.ABORTED, StatusCode.INTERNAL, StatusCode.UNAVAILABLE ], - backoff_settings = BACKOFF_SETTINGS + 
backoff_settings=BACKOFF_SETTINGS ) + class Table(object): """Representation of a Google Cloud Bigtable Table. @@ -295,7 +293,8 @@ def read_rows(self, start_key=None, end_key=None, limit=None, """ client = self._instance._client retrying_iterator = ReadRowsIterator(client, self.name, start_key, - end_key, filter_, limit, RETRY_OPTIONS) + end_key, filter_, limit, + RETRY_OPTIONS) return PartialRowsData(retrying_iterator) def sample_row_keys(self): From 5a8fdc0404d213bdc31bc838ee53dde24400066d Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Mon, 8 May 2017 15:02:56 -0400 Subject: [PATCH 03/62] add some tests for read_rows retries --- bigtable/google/cloud/bigtable/retry.py | 55 ++++-------- bigtable/google/cloud/bigtable/table.py | 24 +++--- bigtable/tests/unit/_testing.py | 27 +++++- bigtable/tests/unit/test_table.py | 110 +++++++++++++++++++++++- 4 files changed, 166 insertions(+), 50 deletions(-) diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py index 98c876797b08..d3435e6a56d2 100644 --- a/bigtable/google/cloud/bigtable/retry.py +++ b/bigtable/google/cloud/bigtable/retry.py @@ -13,13 +13,6 @@ _MILLIS_PER_SECOND = 1000 -def _has_timeout_settings(backoff_settings): - return (backoff_settings.rpc_timeout_multiplier is not None and - backoff_settings.max_rpc_timeout_millis is not None and - backoff_settings.total_timeout_millis is not None and - backoff_settings.initial_rpc_timeout_millis is not None) - - class ReadRowsIterator(): """Creates an iterator equivalent to a_iter, but that retries on certain exceptions. @@ -35,23 +28,18 @@ def __init__(self, client, name, start_key, end_key, filter_, limit, self.end_key = end_key self.filter_ = filter_ self.limit = limit - self.delay_mult = retry_options.backoff_settings.retry_delay_multiplier self.max_delay_millis = \ retry_options.backoff_settings.max_retry_delay_millis - self.has_timeout_settings = \ - _has_timeout_settings(retry_options.backoff_settings) - - if self.has_timeout_settings: - self.timeout_mult = \ - retry_options.backoff_settings.rpc_timeout_multiplier - self.max_timeout = \ - (retry_options.backoff_settings.max_rpc_timeout_millis / - _MILLIS_PER_SECOND) - self.total_timeout = \ - (retry_options.backoff_settings.total_timeout_millis / - _MILLIS_PER_SECOND) - self.set_stream() + self.timeout_mult = \ + retry_options.backoff_settings.rpc_timeout_multiplier + self.max_timeout = \ + (retry_options.backoff_settings.max_rpc_timeout_millis / + _MILLIS_PER_SECOND) + self.total_timeout = \ + (retry_options.backoff_settings.total_timeout_millis / + _MILLIS_PER_SECOND) + self.set_stream() def set_start_key(self, start_key): """ @@ -78,17 +66,12 @@ def next(self, *args, **kwargs): delay = self.retry_options.backoff_settings.initial_retry_delay_millis exc = errors.RetryError('Retry total timeout exceeded before any' 'response was received') - if self.has_timeout_settings: - timeout = (self.retry_options.backoff_settings - .initial_rpc_timeout_millis / - _MILLIS_PER_SECOND) - - now = time.time() - deadline = now + self.total_timeout - else: - timeout = None - deadline = None + timeout = (self.retry_options.backoff_settings + .initial_rpc_timeout_millis / + _MILLIS_PER_SECOND) + now = time.time() + deadline = now + self.total_timeout while deadline is None or now < deadline: try: return six.next(self.stream) @@ -109,12 +92,10 @@ def next(self, *args, **kwargs): to_sleep = random.uniform(0, delay * 2) time.sleep(to_sleep / _MILLIS_PER_SECOND) delay = min(delay * self.delay_mult, self.max_delay_millis) 
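+                # uniform(0, delay * 2) has mean `delay`, so the average
+                # sleep matches the configured delay while the jitter
+                # spreads concurrent retries apart.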
- - if self.has_timeout_settings: - now = time.time() - timeout = min( - timeout * self.timeout_mult, self.max_timeout, - deadline - now) + now = time.time() + timeout = min( + timeout * self.timeout_mult, self.max_timeout, + deadline - now) self.set_stream() six.reraise(errors.RetryError, exc) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index f196adf6bc80..70bee43ca576 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -41,15 +41,12 @@ total_timeout_millis=30 * 60 * 1000 ) -RETRY_OPTIONS = RetryOptions( - retry_codes=[ - StatusCode.DEADLINE_EXCEEDED, - StatusCode.ABORTED, - StatusCode.INTERNAL, - StatusCode.UNAVAILABLE - ], - backoff_settings=BACKOFF_SETTINGS -) +RETRY_CODES = [ + StatusCode.DEADLINE_EXCEEDED, + StatusCode.ABORTED, + StatusCode.INTERNAL, + StatusCode.UNAVAILABLE +] class Table(object): @@ -264,7 +261,7 @@ def read_row(self, row_key, filter_=None): return rows_data.rows[row_key] def read_rows(self, start_key=None, end_key=None, limit=None, - filter_=None): + filter_=None, backoff_settings=None): """Read rows from this table. :type start_key: bytes @@ -292,6 +289,13 @@ def read_rows(self, start_key=None, end_key=None, limit=None, the streamed results. """ client = self._instance._client + if backoff_settings is None: + backoff_settings = BACKOFF_SETTINGS + RETRY_OPTIONS = RetryOptions( + retry_codes=RETRY_CODES, + backoff_settings=backoff_settings + ) + retrying_iterator = ReadRowsIterator(client, self.name, start_key, end_key, filter_, limit, RETRY_OPTIONS) diff --git a/bigtable/tests/unit/_testing.py b/bigtable/tests/unit/_testing.py index e67af6a1498c..7587c66c133b 100644 --- a/bigtable/tests/unit/_testing.py +++ b/bigtable/tests/unit/_testing.py @@ -14,7 +14,6 @@ """Mocks used to emulate gRPC generated objects.""" - class _FakeStub(object): """Acts as a gPRC stub.""" @@ -27,6 +26,16 @@ def __getattr__(self, name): # since __getattribute__ will handle them. return _MethodMock(name, self) +class _CustomFakeStub(object): + """Acts as a gRPC stub. Generates a result using an injected callable.""" + def __init__(self, result_callable): + self.result_callable = result_callable + self.method_calls = [] + + def __getattr__(self, name): + # We need not worry about attributes set in constructor + # since __getattribute__ will handle them. + return _CustomMethodMock(name, self) class _MethodMock(object): """Mock for API method attached to a gRPC stub. @@ -42,5 +51,19 @@ def __call__(self, *args, **kwargs): """Sync method meant to mock a gRPC stub request.""" self._stub.method_calls.append((self._name, args, kwargs)) curr_result, self._stub.results = (self._stub.results[0], - self._stub.results[1:]) + self._stub.results[1:]) return curr_result + +class _CustomMethodMock(object): + """ + Same as _MethodMock, but backed by an injected callable. 
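+    The callable is invoked anew on every stubbed RPC call, so a test can
+    return a fresh iterator (or raise) for each retry attempt.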
+ """ + + def __init__(self, name, stub): + self._name = name + self._stub = stub + + def __call__(self, *args, **kwargs): + """Sync method meant to mock a gRPC stub request.""" + self._stub.method_calls.append((self._name, args, kwargs)) + return self._stub.result_callable() diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 51adcef6ed17..860b005d7601 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -377,7 +377,7 @@ def mock_create_row_request(table_name, **kwargs): end_key = b'end-key' filter_obj = object() limit = 22 - with _Monkey(MUT, _create_row_request=mock_create_row_request): + with _Monkey(MUT, _create_row_request=mock_create_row_request): # Perform the method and check the result. result = table.read_rows( start_key=start_key, end_key=end_key, filter_=filter_obj, @@ -399,6 +399,114 @@ def mock_create_row_request(table_name, **kwargs): } self.assertEqual(mock_created, [(table.name, created_kwargs)]) + def test_read_rows_one_chunk(self): + from google.cloud._testing import _Monkey + from tests.unit._testing import _FakeStub + from google.cloud.bigtable import retry as MUT + from google.cloud.bigtable.retry import ReadRowsIterator + from google.cloud.bigtable.row_data import Cell + from google.cloud.bigtable.row_data import PartialRowsData + + client = _Client() + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._make_one(self.TABLE_ID, instance) + + # Create request_pb + request_pb = object() # Returned by our mock. + mock_created = [] + + def mock_create_row_request(table_name, **kwargs): + mock_created.append((table_name, kwargs)) + return request_pb + + # Create response_iterator + chunk = _ReadRowsResponseCellChunkPB( + row_key=self.ROW_KEY, + family_name=self.FAMILY_NAME, + qualifier=self.QUALIFIER, + timestamp_micros=self.TIMESTAMP_MICROS, + value=self.VALUE, + commit_row=True, + ) + response_pb = _ReadRowsResponsePB(chunks=[chunk]) + response_iterator = iter([response_pb]) + + # Patch the stub used by the API method. + client._data_stub = stub = _FakeStub(response_iterator) + + start_key = b'start-key' + end_key = b'end-key' + filter_obj = object() + limit = 22 + with _Monkey(MUT, _create_row_request=mock_create_row_request): + # Perform the method and check the result. + result = table.read_rows( + start_key=start_key, end_key=end_key, filter_=filter_obj, + limit=limit) + result.consume_next() + + def test_read_rows_retry_timeout(self): + from google.cloud._testing import _Monkey + from tests.unit._testing import _CustomFakeStub + from google.cloud.bigtable.row_data import PartialRowsData + from google.cloud.bigtable import retry as MUT + from google.cloud.bigtable.retry import ReadRowsIterator + from google.gax import BackoffSettings + from google.gax.errors import RetryError + from grpc import StatusCode, RpcError + import time + + client = _Client() + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._make_one(self.TABLE_ID, instance) + + # Create request_pb + request_pb = object() # Returned by our mock. + mock_created = [] + + def mock_create_row_request(table_name, **kwargs): + mock_created.append((table_name, kwargs)) + return request_pb + + # Create a slow response iterator to cause a timeout + class MockTimeoutError(RpcError): + def code(self): + return StatusCode.DEADLINE_EXCEEDED + + def _wait_then_raise(): + time.sleep(0.5) + raise MockTimeoutError() + + # Patch the stub used by the API method. 
The stub should create a new + # slow_iterator every time its queried. + def make_slow_iterator(): + return (_wait_then_raise() for i in range(10)) + client._data_stub = stub = _CustomFakeStub(make_slow_iterator) + + # Set to timeout before RPC completes + test_backoff_settings = BackoffSettings( + initial_retry_delay_millis=10, + retry_delay_multiplier=1.3, + max_retry_delay_millis=30000, + initial_rpc_timeout_millis=1000, + rpc_timeout_multiplier=1.0, + max_rpc_timeout_millis=25 * 60 * 1000, + total_timeout_millis=1000 + ) + + start_key = b'start-key' + end_key = b'end-key' + filter_obj = object() + limit = 22 + with _Monkey(MUT, _create_row_request=mock_create_row_request): + # Verify that a RetryError is thrown on read. + result = table.read_rows( + start_key=start_key, end_key=end_key, filter_=filter_obj, + limit=limit, backoff_settings=test_backoff_settings) + with self.assertRaises(RetryError): + result.consume_next() + + def test_sample_row_keys(self): from tests.unit._testing import _FakeStub From 537e8b684157d88742bc13887712f2ab77ac8931 Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Wed, 10 May 2017 19:32:52 -0400 Subject: [PATCH 04/62] add move test coverage for read_rows retries --- bigtable/google/cloud/bigtable/retry.py | 3 +- bigtable/tests/unit/test_table.py | 50 ++++++++++++++++++++++++- 2 files changed, 50 insertions(+), 3 deletions(-) diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py index d3435e6a56d2..187069969d77 100644 --- a/bigtable/google/cloud/bigtable/retry.py +++ b/bigtable/google/cloud/bigtable/retry.py @@ -80,8 +80,7 @@ def next(self, *args, **kwargs): except RpcError as error: # pylint: disable=broad-except code = config.exc_to_code(error) if code not in self.retry_options.retry_codes: - six.reraise(errors.RetryError, - errors.RetryError(str(error))) + six.reraise(type(error), error) # pylint: disable=redefined-variable-type exc = errors.RetryError( diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 860b005d7601..c64ed95f500d 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -443,7 +443,7 @@ def mock_create_row_request(table_name, **kwargs): result = table.read_rows( start_key=start_key, end_key=end_key, filter_=filter_obj, limit=limit) - result.consume_next() + result.consume_all() def test_read_rows_retry_timeout(self): from google.cloud._testing import _Monkey @@ -506,6 +506,54 @@ def make_slow_iterator(): with self.assertRaises(RetryError): result.consume_next() + def test_read_rows_non_idempotent_error_throws(self): + from google.cloud._testing import _Monkey + from tests.unit._testing import _CustomFakeStub + from google.cloud.bigtable.row_data import PartialRowsData + from google.cloud.bigtable import retry as MUT + from google.cloud.bigtable.retry import ReadRowsIterator + from google.gax import BackoffSettings + from google.gax.errors import RetryError + from grpc import StatusCode, RpcError + import time + + client = _Client() + instance = _Instance(self.INSTANCE_NAME, client=client) + table = self._make_one(self.TABLE_ID, instance) + + # Create request_pb + request_pb = object() # Returned by our mock. 
+ mock_created = [] + + def mock_create_row_request(table_name, **kwargs): + mock_created.append((table_name, kwargs)) + return request_pb + + # Create response iterator that raises a non-idempotent exception + class MockNonIdempotentError(RpcError): + def code(self): + return StatusCode.RESOURCE_EXHAUSTED + + def _raise(): + raise MockNonIdempotentError() + + # Patch the stub used by the API method. The stub should create a new + # slow_iterator every time its queried. + def make_raising_iterator(): + return (_raise() for i in range(10)) + client._data_stub = stub = _CustomFakeStub(make_raising_iterator) + + start_key = b'start-key' + end_key = b'end-key' + filter_obj = object() + limit = 22 + with _Monkey(MUT, _create_row_request=mock_create_row_request): + # Verify that a RetryError is thrown on read. + result = table.read_rows( + start_key=start_key, end_key=end_key, filter_=filter_obj, + limit=limit) + with self.assertRaises(MockNonIdempotentError): + result.consume_next() def test_sample_row_keys(self): from tests.unit._testing import _FakeStub From bddf67343065880dfe1158712a778dc739589a42 Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Wed, 10 May 2017 19:47:25 -0400 Subject: [PATCH 05/62] correct import for _create_row_request in test_table.py --- bigtable/tests/unit/test_table.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index c64ed95f500d..5e4b8aaff3ee 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -588,7 +588,7 @@ class Test__create_row_request(unittest.TestCase): def _call_fut(self, table_name, row_key=None, start_key=None, end_key=None, filter_=None, limit=None): - from google.cloud.bigtable.table import _create_row_request + from google.cloud.bigtable.retry import _create_row_request return _create_row_request( table_name, row_key=row_key, start_key=start_key, end_key=end_key, From 9688b3cbebeb58e6faf84b730129d8f82ff167d2 Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Wed, 10 May 2017 19:58:58 -0400 Subject: [PATCH 06/62] add test coverage for start_key_closed in read_rows --- bigtable/tests/unit/test_table.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 5e4b8aaff3ee..369050776e57 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -587,12 +587,12 @@ def test_sample_row_keys(self): class Test__create_row_request(unittest.TestCase): def _call_fut(self, table_name, row_key=None, start_key=None, end_key=None, - filter_=None, limit=None): + start_key_closed=True, filter_=None, limit=None): from google.cloud.bigtable.retry import _create_row_request return _create_row_request( table_name, row_key=row_key, start_key=start_key, end_key=end_key, - filter_=filter_, limit=limit) + start_key_closed=start_key_closed, filter_=filter_, limit=limit) def test_table_name_only(self): table_name = 'table_name' @@ -615,7 +615,7 @@ def test_row_key(self): expected_result.rows.row_keys.append(row_key) self.assertEqual(result, expected_result) - def test_row_range_start_key(self): + def test_row_range_start_key_closed(self): table_name = 'table_name' start_key = b'start_key' result = self._call_fut(table_name, start_key=start_key) @@ -623,6 +623,15 @@ def test_row_range_start_key(self): expected_result.rows.row_ranges.add(start_key_closed=start_key) self.assertEqual(result, expected_result) + def 
test_row_range_start_key_open(self): + table_name = 'table_name' + start_key = b'start_key' + result = self._call_fut(table_name, start_key=start_key, + start_key_closed=False) + expected_result = _ReadRowsRequestPB(table_name=table_name) + expected_result.rows.row_ranges.add(start_key_open=start_key) + self.assertEqual(result, expected_result) + def test_row_range_end_key(self): table_name = 'table_name' end_key = b'end_key' From c4bad6ef887cccd09eb112cbde8226c2a610544d Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Wed, 10 May 2017 20:08:32 -0400 Subject: [PATCH 07/62] remove redundant ReadRowsIterator#__iter__ --- bigtable/google/cloud/bigtable/retry.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py index 187069969d77..379cb8393f84 100644 --- a/bigtable/google/cloud/bigtable/retry.py +++ b/bigtable/google/cloud/bigtable/retry.py @@ -102,10 +102,6 @@ def next(self, *args, **kwargs): def __next__(self, *args, **kwargs): return self.next(*args, **kwargs) - def __iter__(self): - return self - - def _create_row_request(table_name, row_key=None, start_key=None, start_key_closed=True, end_key=None, filter_=None, limit=None): From 68ef3af514a9b8787a17118d0e263fea6334390f Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Wed, 10 May 2017 20:19:28 -0400 Subject: [PATCH 08/62] add newline in retry.py for linter --- bigtable/google/cloud/bigtable/retry.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py index 379cb8393f84..c208afeee5b9 100644 --- a/bigtable/google/cloud/bigtable/retry.py +++ b/bigtable/google/cloud/bigtable/retry.py @@ -102,6 +102,7 @@ def next(self, *args, **kwargs): def __next__(self, *args, **kwargs): return self.next(*args, **kwargs) + def _create_row_request(table_name, row_key=None, start_key=None, start_key_closed=True, end_key=None, filter_=None, limit=None): From d0853127146bfd5a8c5eced435ea21f30fe7a4ac Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Fri, 9 Jun 2017 17:29:44 -0400 Subject: [PATCH 09/62] check test outcome --- bigtable/tests/system.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 3772aca721a1..785df5ccd9db 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -328,8 +328,8 @@ def process_scan(table, range, ids): } test_platform = platform.system() - if (test_platform not in MOCK_SERVER_URLS): - self.fail("Retry server not available for platform " + test_platform) + if test_platform not in MOCK_SERVER_URLS: + self.skip('Retry server not available for platform {0}.'.format(test_platform)) mock_server_download = urlopen(MOCK_SERVER_URLS[test_platform]).read() mock_server_file = open(SERVER_ZIP, 'wb') @@ -338,10 +338,11 @@ def process_scan(table, range, ids): # Unzip server subprocess.call(['tar', 'zxvf', SERVER_ZIP, '-C', '.']) - # Connect to server + # Connect to server server = subprocess.Popen( ['./' + SERVER_NAME, '--script=' + TEST_SCRIPT], stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, ) (endpoint, port) = server.stdout.readline().rstrip("\n").split(":") @@ -356,13 +357,20 @@ def process_scan(table, range, ids): if line.startswith("CLIENT:"): chunks = line.split(" ") op = chunks[1] - if (op != "SCAN"): - self.fail("Script contained " + op + " operation. 
Only \'SCAN\' is supported.") - else: - process_scan(table, chunks[2], chunks[3]) + process_scan(table, chunks[2], chunks[3]) - # Clean up + # Check that the test passed server.kill() + server_stdout_lines = [] + while True: + line = server.stdout.readline() + if line != '': + server_stdout_lines.append(line) + else: + break + self.assertEqual(server_stdout_lines[-1], "PASS\n") + + # Clean up os.remove(SERVER_ZIP) os.remove(SERVER_NAME) From 70d920fb325828a8d0217ee932f7c8ad6f49bdc4 Mon Sep 17 00:00:00 2001 From: Cal Peyser Date: Fri, 9 Jun 2017 17:41:53 -0400 Subject: [PATCH 10/62] address comments --- bigtable/google/cloud/bigtable/retry.py | 5 +++-- bigtable/google/cloud/bigtable/row_data.py | 3 +-- bigtable/tests/system.py | 12 ++++++------ bigtable/tests/unit/test_table.py | 4 ++-- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py index c208afeee5b9..f20419ce4f8e 100644 --- a/bigtable/google/cloud/bigtable/retry.py +++ b/bigtable/google/cloud/bigtable/retry.py @@ -2,6 +2,7 @@ import random import time import six +import sys from google.cloud._helpers import _to_bytes from google.cloud.bigtable._generated import ( @@ -13,7 +14,7 @@ _MILLIS_PER_SECOND = 1000 -class ReadRowsIterator(): +class ReadRowsIterator(object): """Creates an iterator equivalent to a_iter, but that retries on certain exceptions. """ @@ -97,7 +98,7 @@ def next(self, *args, **kwargs): deadline - now) self.set_stream() - six.reraise(errors.RetryError, exc) + six.reraise(errors.RetryError, exc, sys.exc_info()[2]) def __next__(self, *args, **kwargs): return self.next(*args, **kwargs) diff --git a/bigtable/google/cloud/bigtable/row_data.py b/bigtable/google/cloud/bigtable/row_data.py index e1b1acd8c643..73451faaecd4 100644 --- a/bigtable/google/cloud/bigtable/row_data.py +++ b/bigtable/google/cloud/bigtable/row_data.py @@ -274,8 +274,7 @@ def consume_next(self): self._validate_chunk(chunk) - if ("ReadRowsIterator" in - self._response_iterator.__class__.__name__): + if hasattr(self._response_iterator, 'set_start_key'): self._response_iterator.set_start_key(chunk.row_key) if chunk.reset_row: diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 785df5ccd9db..cbc9b488edec 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -352,12 +352,12 @@ def process_scan(table, range, ids): table = instance.table("table") # Run test, line by line - script = open(TEST_SCRIPT, 'r') - for line in script.readlines(): - if line.startswith("CLIENT:"): - chunks = line.split(" ") - op = chunks[1] - process_scan(table, chunks[2], chunks[3]) + with open(TEST_SCRIPT, 'r') as script: + for line in script.readlines(): + if line.startswith("CLIENT:"): + chunks = line.split(" ") + op = chunks[1] + process_scan(table, chunks[2], chunks[3]) # Check that the test passed server.kill() diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 369050776e57..05c765594e62 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -474,7 +474,7 @@ def code(self): return StatusCode.DEADLINE_EXCEEDED def _wait_then_raise(): - time.sleep(0.5) + time.sleep(0.1) raise MockTimeoutError() # Patch the stub used by the API method. 
The stub should create a new @@ -486,7 +486,7 @@ def make_slow_iterator(): # Set to timeout before RPC completes test_backoff_settings = BackoffSettings( initial_retry_delay_millis=10, - retry_delay_multiplier=1.3, + retry_delay_multiplier=0.3, max_retry_delay_millis=30000, initial_rpc_timeout_millis=1000, rpc_timeout_multiplier=1.0, From 7cffbe085aa607d802573e33ad65c0a18b220579 Mon Sep 17 00:00:00 2001 From: calpeyser Date: Fri, 14 Jul 2017 09:58:20 -0400 Subject: [PATCH 11/62] remove extra import --- bigtable/google/cloud/bigtable/table.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 771799891a76..e176834ccc18 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -17,7 +17,6 @@ import six -from __future__ import absolute_import from google.cloud.bigtable._generated import ( bigtable_pb2 as data_messages_v2_pb2) from google.cloud.bigtable._generated import ( @@ -444,4 +443,4 @@ def _check_row_type(row): """ if not isinstance(row, DirectRow): raise TypeError('Bulk processing can not be applied for ' - 'conditional or append mutations.') \ No newline at end of file + 'conditional or append mutations.') From 19079c3243d1193ce8938fda60127e7e958d1801 Mon Sep 17 00:00:00 2001 From: calpeyser Date: Fri, 14 Jul 2017 10:03:29 -0400 Subject: [PATCH 12/62] adding newline in bigtable/table.py to satisfy linter --- bigtable/google/cloud/bigtable/table.py | 1 + 1 file changed, 1 insertion(+) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index e176834ccc18..3ed2d20ea975 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -382,6 +382,7 @@ def sample_row_keys(self): response_iterator = client._data_stub.SampleRowKeys(request_pb) return response_iterator + def _mutate_rows_request(table_name, rows): """Creates a request to mutate rows in a table. From 711905898539a3ba15d3b54e12975842e3eebcb5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 14 Jul 2017 14:22:49 -0700 Subject: [PATCH 13/62] Speech GAPIC to master (#3607) * Vendor the GAPIC for Speech. * Speech Partial Veneer (#3483) * Update to docs based on @dhermes catch. * Fix incorrect variable. * Fix the docs. * Style fixes to unit tests. * More PR review from me. 
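
For illustration, a minimal sketch of the GAPIC-backed surface this change
exposes (mirroring the updated docs in this commit; the bucket URI is a
placeholder):

    from google.cloud import speech

    client = speech.SpeechClient()
    results = client.recognize(
        audio=speech.types.RecognitionAudio(
            uri='gs://my-bucket/recording.flac',
        ),
        config=speech.types.RecognitionConfig(
            encoding='LINEAR16',
            language_code='en-US',
            sample_rate_hertz=44100,
        ),
    )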
--- docs/index.rst | 2 +- docs/speech/alternative.rst | 7 - docs/speech/client.rst | 7 - docs/speech/encoding.rst | 7 - docs/speech/gapic/api.rst | 6 + docs/speech/gapic/types.rst | 5 + docs/speech/{usage.rst => index.rst} | 228 +-- docs/speech/operation.rst | 7 - docs/speech/result.rst | 7 - docs/speech/sample.rst | 7 - setup.py | 4 +- speech/google/cloud/gapic/__init__.py | 1 + speech/google/cloud/gapic/speech/__init__.py | 1 + .../google/cloud/gapic/speech/v1/__init__.py | 0 speech/google/cloud/gapic/speech/v1/enums.py | 86 ++ .../cloud/gapic/speech/v1/speech_client.py | 285 ++++ .../gapic/speech/v1/speech_client_config.json | 43 + speech/google/cloud/proto/__init__.py | 1 + speech/google/cloud/proto/speech/__init__.py | 1 + .../google/cloud/proto/speech/v1/__init__.py | 1 + .../cloud/proto/speech/v1/cloud_speech_pb2.py | 1331 +++++++++++++++++ .../proto/speech/v1/cloud_speech_pb2_grpc.py | 86 ++ speech/google/cloud/speech/__init__.py | 20 +- speech/google/cloud/speech/_gax.py | 7 +- speech/google/cloud/speech/client.py | 11 + speech/google/cloud/speech_v1/__init__.py | 34 + speech/google/cloud/speech_v1/helpers.py | 88 ++ speech/google/cloud/speech_v1/types.py | 30 + speech/nox.py | 14 +- speech/setup.py | 34 +- speech/tests/gapic/test_speech_client_v1.py | 212 +++ speech/tests/system.py | 33 +- speech/tests/unit/test__gax.py | 23 +- speech/tests/unit/test_client.py | 182 +-- speech/tests/unit/test_helpers.py | 66 + 35 files changed, 2589 insertions(+), 288 deletions(-) delete mode 100644 docs/speech/alternative.rst delete mode 100644 docs/speech/client.rst delete mode 100644 docs/speech/encoding.rst create mode 100644 docs/speech/gapic/api.rst create mode 100644 docs/speech/gapic/types.rst rename docs/speech/{usage.rst => index.rst} (50%) delete mode 100644 docs/speech/operation.rst delete mode 100644 docs/speech/result.rst delete mode 100644 docs/speech/sample.rst create mode 100644 speech/google/cloud/gapic/__init__.py create mode 100644 speech/google/cloud/gapic/speech/__init__.py create mode 100644 speech/google/cloud/gapic/speech/v1/__init__.py create mode 100644 speech/google/cloud/gapic/speech/v1/enums.py create mode 100644 speech/google/cloud/gapic/speech/v1/speech_client.py create mode 100644 speech/google/cloud/gapic/speech/v1/speech_client_config.json create mode 100644 speech/google/cloud/proto/__init__.py create mode 100644 speech/google/cloud/proto/speech/__init__.py create mode 100644 speech/google/cloud/proto/speech/v1/__init__.py create mode 100644 speech/google/cloud/proto/speech/v1/cloud_speech_pb2.py create mode 100644 speech/google/cloud/proto/speech/v1/cloud_speech_pb2_grpc.py create mode 100644 speech/google/cloud/speech_v1/__init__.py create mode 100644 speech/google/cloud/speech_v1/helpers.py create mode 100644 speech/google/cloud/speech_v1/types.py create mode 100644 speech/tests/gapic/test_speech_client_v1.py create mode 100644 speech/tests/unit/test_helpers.py diff --git a/docs/index.rst b/docs/index.rst index 623af475c568..8c81cefdda2e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -12,7 +12,7 @@ resource-manager/api runtimeconfig/usage spanner/usage - speech/usage + speech/index error-reporting/usage monitoring/usage logging/usage diff --git a/docs/speech/alternative.rst b/docs/speech/alternative.rst deleted file mode 100644 index 7c287b8dfa44..000000000000 --- a/docs/speech/alternative.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Alternative -================== - -.. 
automodule:: google.cloud.speech.alternative - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/speech/client.rst b/docs/speech/client.rst deleted file mode 100644 index 4e6caad90ff3..000000000000 --- a/docs/speech/client.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Client -============= - -.. automodule:: google.cloud.speech.client - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/speech/encoding.rst b/docs/speech/encoding.rst deleted file mode 100644 index affe80a4ebd2..000000000000 --- a/docs/speech/encoding.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Encoding -=============== - -.. automodule:: google.cloud.speech.encoding - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/speech/gapic/api.rst b/docs/speech/gapic/api.rst new file mode 100644 index 000000000000..ded919fcbdcd --- /dev/null +++ b/docs/speech/gapic/api.rst @@ -0,0 +1,6 @@ +Speech Client API +================= + +.. automodule:: google.cloud.speech_v1 + :members: + :inherited-members: diff --git a/docs/speech/gapic/types.rst b/docs/speech/gapic/types.rst new file mode 100644 index 000000000000..0ddf83d3bb60 --- /dev/null +++ b/docs/speech/gapic/types.rst @@ -0,0 +1,5 @@ +Speech Client Types +=================== + +.. automodule:: google.cloud.speech_v1.types + :members: diff --git a/docs/speech/usage.rst b/docs/speech/index.rst similarity index 50% rename from docs/speech/usage.rst rename to docs/speech/index.rst index a651965e9e18..9373e830cff3 100644 --- a/docs/speech/usage.rst +++ b/docs/speech/index.rst @@ -1,16 +1,6 @@ +###### Speech -====== - -.. toctree:: - :maxdepth: 2 - :hidden: - - client - encoding - operation - result - sample - alternative +###### The `Google Speech`_ API enables developers to convert audio to text. The API recognizes over 80 languages and variants, to support your global user @@ -18,10 +8,11 @@ base. .. _Google Speech: https://cloud.google.com/speech/docs/getting-started -Client ------- -:class:`~google.cloud.speech.client.Client` objects provide a +Authentication and Configuration +-------------------------------- + +:class:`~google.cloud.speech_v1.SpeechClient` objects provide a means to configure your application. Each instance holds an authenticated connection to the Cloud Speech Service. @@ -29,21 +20,22 @@ For an overview of authentication in ``google-cloud-python``, see :doc:`/core/auth`. Assuming your environment is set up as described in that document, -create an instance of :class:`~google.cloud.speech.client.Client`. +create an instance of :class:`~.speech_v1.SpeechClient`. .. code-block:: python >>> from google.cloud import speech - >>> client = speech.Client() + >>> client = speech.SpeechClient() Asynchronous Recognition ------------------------ -The :meth:`~google.cloud.speech.Client.long_running_recognize` sends audio -data to the Speech API and initiates a Long Running Operation. Using this -operation, you can periodically poll for recognition results. Use asynchronous -requests for audio data of any duration up to 80 minutes. +The :meth:`~.speech_v1.SpeechClient.long_running_recognize` method +sends audio data to the Speech API and initiates a Long Running Operation. + +Using this operation, you can periodically poll for recognition results. +Use asynchronous requests for audio data of any duration up to 80 minutes. 
See: `Speech Asynchronous Recognize`_ @@ -52,13 +44,16 @@ See: `Speech Asynchronous Recognize`_ >>> import time >>> from google.cloud import speech - >>> client = speech.Client() - >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac', - ... encoding=speech.Encoding.LINEAR16, - ... sample_rate_hertz=44100) - >>> operation = sample.long_running_recognize( - ... language_code='en-US', - ... max_alternatives=2, + >>> client = speech.SpeechClient() + >>> operation = client.long_running_recognize( + ... audio=speech.types.RecognitionAudio( + ... uri='gs://my-bucket/recording.flac', + ... ), + ... config=speech.types.RecognitionConfig( + ... encoding='FLAC', + ... language_code='en-US', + ... sample_rate_hertz=44100, + ... ), ... ) >>> retry_count = 100 >>> while retry_count > 0 and not operation.complete: @@ -80,7 +75,7 @@ See: `Speech Asynchronous Recognize`_ Synchronous Recognition ----------------------- -The :meth:`~google.cloud.speech.Client.recognize` method converts speech +The :meth:`~.speech_v1.SpeechClient.recognize` method converts speech data to text and returns alternative text transcriptions. This example uses ``language_code='en-GB'`` to better recognize a dialect from @@ -89,12 +84,17 @@ Great Britain. .. code-block:: python >>> from google.cloud import speech - >>> client = speech.Client() - >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac', - ... encoding=speech.Encoding.FLAC, - ... sample_rate_hertz=44100) - >>> results = sample.recognize( - ... language_code='en-GB', max_alternatives=2) + >>> client = speech.SpeechClient() + >>> results = client.recognize( + ... audio=speech.types.RecognitionAudio( + ... uri='gs://my-bucket/recording.flac', + ... ), + ... config=speech.types.RecognitionConfig( + ... encoding='FLAC', + ... language_code='en-GB', + ... sample_rate_hertz=44100, + ... ), + ... ) >>> for result in results: ... for alternative in result.alternatives: ... print('=' * 20) @@ -112,14 +112,17 @@ Example of using the profanity filter. .. code-block:: python >>> from google.cloud import speech - >>> client = speech.Client() - >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac', - ... encoding=speech.Encoding.FLAC, - ... sample_rate_hertz=44100) - >>> results = sample.recognize( - ... language_code='en-US', - ... max_alternatives=1, - ... profanity_filter=True, + >>> client = speech.SpeechClient() + >>> results = client.recognize( + ... audio=speech.types.RecognitionAudio( + ... uri='gs://my-bucket/recording.flac', + ... ), + ... config=speech.types.RecognitionConfig( + ... encoding='FLAC', + ... language_code='en-US', + ... profanity_filter=True, + ... sample_rate_hertz=44100, + ... ), ... ) >>> for result in results: ... for alternative in result.alternatives: @@ -137,15 +140,19 @@ words to the vocabulary of the recognizer. .. code-block:: python >>> from google.cloud import speech - >>> client = speech.Client() - >>> sample = client.sample(source_uri='gs://my-bucket/recording.flac', - ... encoding=speech.Encoding.FLAC, - ... sample_rate_hertz=44100) - >>> hints = ['hi', 'good afternoon'] - >>> results = sample.recognize( - ... language_code='en-US', - ... max_alternatives=2, - ... speech_contexts=hints, + >>> client = speech.SpeechClient() + >>> results = client.recognize( + ... audio=speech.types.RecognitionAudio( + ... uri='gs://my-bucket/recording.flac', + ... ), + ... config=speech.types.RecognitionConfig( + ... encoding='FLAC', + ... 
language_code='en-US', + ... sample_rate_hertz=44100, + ... speech_contexts=[speech.types.SpeechContext( + ... phrases=['hi', 'good afternoon'], + ... )], + ... ), ... ) >>> for result in results: ... for alternative in result.alternatives: @@ -160,7 +168,7 @@ words to the vocabulary of the recognizer. Streaming Recognition --------------------- -The :meth:`~google.cloud.speech.Client.streaming_recognize` method converts +The :meth:`~.speech_v1.SpeechClient.streaming_recognize` method converts speech data to possible text alternatives on the fly. .. note:: @@ -170,18 +178,27 @@ speech data to possible text alternatives on the fly. .. code-block:: python + >>> import io >>> from google.cloud import speech - >>> client = speech.Client() - >>> with open('./hello.wav', 'rb') as stream: - ... sample = client.sample(stream=stream, - ... encoding=speech.Encoding.LINEAR16, - ... sample_rate_hertz=16000) - ... results = sample.streaming_recognize(language_code='en-US') - ... for result in results: - ... for alternative in result.alternatives: - ... print('=' * 20) - ... print('transcript: ' + alternative.transcript) - ... print('confidence: ' + str(alternative.confidence)) + >>> client = speech.SpeechClient() + >>> config = speech.types.RecognitionConfig( + ... encoding='LINEAR16', + ... language_code='en-US', + ... sample_rate_hertz=44100, + ... ) + >>> with io.open('./hello.wav', 'rb') as stream: + ... requests = [speech.types.StreamingRecognizeRequest( + ... audio_content=stream.read(), + ... )] + >>> results = client.streaming_recognize( + ... speech.types.StreamingRecognitionConfig(config=config), + ... requests, + ... ) + >>> for result in results: + ... for alternative in result.alternatives: + ... print('=' * 20) + ... print('transcript: ' + alternative.transcript) + ... print('confidence: ' + str(alternative.confidence)) ==================== transcript: hello thank you for using Google Cloud platform confidence: 0.927983105183 @@ -193,20 +210,33 @@ until the client closes the output stream or until the maximum time limit has been reached. If you only want to recognize a single utterance you can set - ``single_utterance`` to :data:`True` and only one result will be returned. +``single_utterance`` to :data:`True` and only one result will be returned. See: `Single Utterance`_ .. code-block:: python - >>> with open('./hello_pause_goodbye.wav', 'rb') as stream: - ... sample = client.sample(stream=stream, - ... encoding=speech.Encoding.LINEAR16, - ... sample_rate_hertz=16000) - ... results = sample.streaming_recognize( - ... language_code='en-US', - ... single_utterance=True, - ... ) + >>> import io + >>> from google.cloud import speech + >>> client = speech.SpeechClient() + >>> config = speech.types.RecognitionConfig( + ... encoding='LINEAR16', + ... language_code='en-US', + ... sample_rate_hertz=44100, + ... ) + >>> with io.open('./hello-pause-goodbye.wav', 'rb') as stream: + ... requests = [speech.types.StreamingRecognizeRequest( + ... audio_content=stream.read(), + ... )] + >>> results = client.streaming_recognize( + ... speech.types.StreamingRecognitionConfig( + ... config=config, + ... single_utterance=True, + ... ), + ... requests, + ... ) + >>> for result in results: + ... for alternative in result.alternatives: + ... print('=' * 20) + ... print('transcript: ' + alternative.transcript) + ... print('confidence: ' + str(alternative.confidence)) - ... for result in results: - ... for alternative in result.alternatives: - ... 
print('=' * 20) @@ -221,22 +254,31 @@ If ``interim_results`` is set to :data:`True`, interim results .. code-block:: python + >>> import io >>> from google.cloud import speech - >>> client = speech.Client() - >>> with open('./hello.wav', 'rb') as stream: - ... sample = client.sample(stream=stream, - ... encoding=speech.Encoding.LINEAR16, - ... sample_rate=16000) - ... results = sample.streaming_recognize( - ... interim_results=True, - ... language_code='en-US', - ... ) - ... for result in results: - ... for alternative in result.alternatives: - ... print('=' * 20) - ... print('transcript: ' + alternative.transcript) - ... print('confidence: ' + str(alternative.confidence)) - ... print('is_final:' + str(result.is_final)) + >>> client = speech.SpeechClient() + >>> config = speech.types.RecognitionConfig( + ... encoding='LINEAR16', + ... language_code='en-US', + ... sample_rate_hertz=44100, + ... ) + >>> with io.open('./hello.wav', 'rb') as stream: + ... requests = [speech.types.StreamingRecognizeRequest( + ... audio_content=stream.read(), + ... )] + >>> results = client.streaming_recognize( + ... speech.types.StreamingRecognitionConfig( + ... config=config, + ... interim_results=True, + ... ), + ... requests, + ... ) + >>> for result in results: + ... for alternative in result.alternatives: + ... print('=' * 20) + ... print('transcript: ' + alternative.transcript) + ... print('confidence: ' + str(alternative.confidence)) + ... print('is_final:' + str(result.is_final)) ==================== 'he' None ... @@ -254,3 +296,13 @@ If ``interim_results`` is set to :data:`True`, interim results .. _Single Utterance: https://cloud.google.com/speech/reference/rpc/google.cloud.speech.v1beta1#streamingrecognitionconfig .. _sync_recognize: https://cloud.google.com/speech/reference/rest/v1beta1/speech/syncrecognize .. _Speech Asynchronous Recognize: https://cloud.google.com/speech/reference/rest/v1beta1/speech/asyncrecognize + + +API Reference +------------- + +.. toctree:: + :maxdepth: 2 + + gapic/api + gapic/types diff --git a/docs/speech/operation.rst b/docs/speech/operation.rst deleted file mode 100644 index 5c0ec3b92b12..000000000000 --- a/docs/speech/operation.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Operation -================ - -.. automodule:: google.cloud.speech.operation - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/speech/result.rst b/docs/speech/result.rst deleted file mode 100644 index d4759b704199..000000000000 --- a/docs/speech/result.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Result -============= - -.. automodule:: google.cloud.speech.result - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/speech/sample.rst b/docs/speech/sample.rst deleted file mode 100644 index f0b4098ba4ca..000000000000 --- a/docs/speech/sample.rst +++ /dev/null @@ -1,7 +0,0 @@ -Speech Sample -============= - -.. 
automodule:: google.cloud.speech.sample - :members: - :undoc-members: - :show-inheritance: diff --git a/setup.py b/setup.py index 6977c6151ddc..ca6491ec530e 100644 --- a/setup.py +++ b/setup.py @@ -63,7 +63,7 @@ 'google-cloud-resource-manager >= 0.25.0, < 0.26dev', 'google-cloud-runtimeconfig >= 0.25.0, < 0.26dev', 'google-cloud-spanner >= 0.25.0, < 0.26dev', - 'google-cloud-speech >= 0.26.0, < 0.27dev', + 'google-cloud-speech >= 0.27.0, < 0.28dev', 'google-cloud-storage >= 1.2.0, < 1.3dev', 'google-cloud-translate >= 0.25.0, < 0.26dev', 'google-cloud-videointelligence >= 0.25.0, < 0.26dev', @@ -72,7 +72,7 @@ setup( name='google-cloud', - version='0.26.1', + version='0.26.2', description='API Client library for Google Cloud', long_description=README, install_requires=REQUIREMENTS, diff --git a/speech/google/cloud/gapic/__init__.py b/speech/google/cloud/gapic/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/speech/google/cloud/gapic/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/speech/google/cloud/gapic/speech/__init__.py b/speech/google/cloud/gapic/speech/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/speech/google/cloud/gapic/speech/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/speech/google/cloud/gapic/speech/v1/__init__.py b/speech/google/cloud/gapic/speech/v1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/speech/google/cloud/gapic/speech/v1/enums.py b/speech/google/cloud/gapic/speech/v1/enums.py new file mode 100644 index 000000000000..98379c7078a9 --- /dev/null +++ b/speech/google/cloud/gapic/speech/v1/enums.py @@ -0,0 +1,86 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class RecognitionConfig(object): + class AudioEncoding(object): + """ + Audio encoding of the data sent in the audio message. All encodings support + only 1 channel (mono) audio. Only ``FLAC`` includes a header that describes + the bytes of audio that follow the header. The other encodings are raw + audio bytes with no header. + + For best results, the audio source should be captured and transmitted using + a lossless encoding (``FLAC`` or ``LINEAR16``). Recognition accuracy may be + reduced if lossy codecs, which include the other codecs listed in + this section, are used to capture or transmit the audio, particularly if + background noise is present. + + Attributes: + ENCODING_UNSPECIFIED (int): Not specified. Will return result ``google.rpc.Code.INVALID_ARGUMENT``. + LINEAR16 (int): Uncompressed 16-bit signed little-endian samples (Linear PCM). + FLAC (int): ```FLAC`` `_ (Free Lossless Audio + Codec) is the recommended encoding because it is + lossless--therefore recognition is not compromised--and + requires only about half the bandwidth of ``LINEAR16``. 
``FLAC`` stream + encoding supports 16-bit and 24-bit samples, however, not all fields in + ``STREAMINFO`` are supported. + MULAW (int): 8-bit samples that compand 14-bit audio samples using G.711 PCMU/mu-law. + AMR (int): Adaptive Multi-Rate Narrowband codec. ``sample_rate_hertz`` must be 8000. + AMR_WB (int): Adaptive Multi-Rate Wideband codec. ``sample_rate_hertz`` must be 16000. + OGG_OPUS (int): Opus encoded audio frames in Ogg container + (`OggOpus `_). + ``sample_rate_hertz`` must be 16000. + SPEEX_WITH_HEADER_BYTE (int): Although the use of lossy encodings is not recommended, if a very low + bitrate encoding is required, ``OGG_OPUS`` is highly preferred over + Speex encoding. The `Speex `_ encoding supported by + Cloud Speech API has a header byte in each block, as in MIME type + ``audio/x-speex-with-header-byte``. + It is a variant of the RTP Speex encoding defined in + `RFC 5574 `_. + The stream is a sequence of blocks, one block per RTP packet. Each block + starts with a byte containing the length of the block, in bytes, followed + by one or more frames of Speex data, padded to an integral number of + bytes (octets) as specified in RFC 5574. In other words, each RTP header + is replaced with a single byte containing the block length. Only Speex + wideband is supported. ``sample_rate_hertz`` must be 16000. + """ + ENCODING_UNSPECIFIED = 0 + LINEAR16 = 1 + FLAC = 2 + MULAW = 3 + AMR = 4 + AMR_WB = 5 + OGG_OPUS = 6 + SPEEX_WITH_HEADER_BYTE = 7 + + +class StreamingRecognizeResponse(object): + class SpeechEventType(object): + """ + Indicates the type of speech event. + + Attributes: + SPEECH_EVENT_UNSPECIFIED (int): No speech event specified. + END_OF_SINGLE_UTTERANCE (int): This event indicates that the server has detected the end of the user's + speech utterance and expects no additional speech. Therefore, the server + will not process additional audio (although it may subsequently return + additional results). The client should stop sending additional audio + data, half-close the gRPC connection, and wait for any additional results + until the server closes the gRPC connection. This event is only sent if + ``single_utterance`` was set to ``true``, and is not used otherwise. + """ + SPEECH_EVENT_UNSPECIFIED = 0 + END_OF_SINGLE_UTTERANCE = 1 diff --git a/speech/google/cloud/gapic/speech/v1/speech_client.py b/speech/google/cloud/gapic/speech/v1/speech_client.py new file mode 100644 index 000000000000..3806330b25bb --- /dev/null +++ b/speech/google/cloud/gapic/speech/v1/speech_client.py @@ -0,0 +1,285 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/cloud/speech/v1/cloud_speech.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. 
A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.cloud.speech.v1 Speech API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gapic.longrunning import operations_client +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +from google.gax.utils import oneof +import google.gax + +from google.cloud.gapic.speech.v1 import enums +from google.cloud.proto.speech.v1 import cloud_speech_pb2 + + +class SpeechClient(object): + """Service that implements Google Cloud Speech API.""" + + SERVICE_ADDRESS = 'speech.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A SpeechClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. 
+ if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-speech', ).version + + # Load the configuration defaults. + default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'speech_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.cloud.speech.v1.Speech', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, ) + self.speech_stub = config.create_stub( + cloud_speech_pb2.SpeechStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self.operations_client = operations_client.OperationsClient( + service_path=service_path, + port=port, + channel=channel, + credentials=credentials, + ssl_credentials=ssl_credentials, + scopes=scopes, + client_config=client_config, + metrics_headers=metrics_headers, ) + + self._recognize = api_callable.create_api_call( + self.speech_stub.Recognize, settings=defaults['recognize']) + self._long_running_recognize = api_callable.create_api_call( + self.speech_stub.LongRunningRecognize, + settings=defaults['long_running_recognize']) + self._streaming_recognize = api_callable.create_api_call( + self.speech_stub.StreamingRecognize, + settings=defaults['streaming_recognize']) + + # Service calls + def recognize(self, config, audio, options=None): + """ + Performs synchronous speech recognition: receive results after all audio + has been sent and processed. + + Example: + >>> from google.cloud.gapic.speech.v1 import speech_client + >>> from google.cloud.gapic.speech.v1 import enums + >>> from google.cloud.proto.speech.v1 import cloud_speech_pb2 + >>> client = speech_client.SpeechClient() + >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC + >>> sample_rate_hertz = 44100 + >>> language_code = 'en-US' + >>> config = cloud_speech_pb2.RecognitionConfig(encoding=encoding, sample_rate_hertz=sample_rate_hertz, language_code=language_code) + >>> uri = 'gs://bucket_name/file_name.flac' + >>> audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + >>> response = client.recognize(config, audio) + + Args: + config (:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognitionConfig`): *Required* Provides information to the recognizer that specifies how to + process the request. + audio (:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognitionAudio`): *Required* The audio data to be recognized. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognizeResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = cloud_speech_pb2.RecognizeRequest(config=config, audio=audio) + return self._recognize(request, options) + + def long_running_recognize(self, config, audio, options=None): + """ + Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + ``Operation.error`` or an ``Operation.response`` which contains + a ``LongRunningRecognizeResponse`` message. 
+ + Example: + >>> from google.cloud.gapic.speech.v1 import speech_client + >>> from google.cloud.gapic.speech.v1 import enums + >>> from google.cloud.proto.speech.v1 import cloud_speech_pb2 + >>> client = speech_client.SpeechClient() + >>> encoding = enums.RecognitionConfig.AudioEncoding.FLAC + >>> sample_rate_hertz = 44100 + >>> language_code = 'en-US' + >>> config = cloud_speech_pb2.RecognitionConfig(encoding=encoding, sample_rate_hertz=sample_rate_hertz, language_code=language_code) + >>> uri = 'gs://bucket_name/file_name.flac' + >>> audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + >>> response = client.long_running_recognize(config, audio) + >>> + >>> def callback(operation_future): + >>> # Handle result. + >>> result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + config (:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognitionConfig`): *Required* Provides information to the recognizer that specifies how to + process the request. + audio (:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.RecognitionAudio`): *Required* The audio data to be recognized. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax._OperationFuture` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = cloud_speech_pb2.LongRunningRecognizeRequest( + config=config, audio=audio) + return google.gax._OperationFuture( + self._long_running_recognize(request, + options), self.operations_client, + cloud_speech_pb2.LongRunningRecognizeResponse, + cloud_speech_pb2.LongRunningRecognizeMetadata, options) + + def streaming_recognize(self, requests, options=None): + """ + Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). + + EXPERIMENTAL: This method interface might change in the future. + + Example: + >>> from google.cloud.gapic.speech.v1 import speech_client + >>> from google.cloud.proto.speech.v1 import cloud_speech_pb2 + >>> client = speech_client.SpeechClient() + >>> request = cloud_speech_pb2.StreamingRecognizeRequest() + >>> requests = [request] + >>> for element in client.streaming_recognize(requests): + >>> # process element + >>> pass + + Args: + requests (iterator[:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.StreamingRecognizeRequest`]): The input objects. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + iterator[:class:`google.cloud.proto.speech.v1.cloud_speech_pb2.StreamingRecognizeResponse`]. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
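+
+        Note:
+            The first ``StreamingRecognizeRequest`` on the stream must
+            contain only the ``streaming_config``; audio is then sent in
+            the ``audio_content`` field of each subsequent request. A
+            minimal sketch of building such a request sequence
+            (``audio_chunks`` is a hypothetical iterable of byte strings):
+
+            >>> streaming_config = cloud_speech_pb2.StreamingRecognitionConfig(
+            ...     config=cloud_speech_pb2.RecognitionConfig(
+            ...         encoding=enums.RecognitionConfig.AudioEncoding.LINEAR16,
+            ...         language_code='en-US',
+            ...         sample_rate_hertz=16000,
+            ...     ),
+            ... )
+            >>> requests = [cloud_speech_pb2.StreamingRecognizeRequest(
+            ...     streaming_config=streaming_config)]
+            >>> requests.extend(
+            ...     cloud_speech_pb2.StreamingRecognizeRequest(audio_content=chunk)
+            ...     for chunk in audio_chunks)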
+ """ + return self._streaming_recognize(requests, options) diff --git a/speech/google/cloud/gapic/speech/v1/speech_client_config.json b/speech/google/cloud/gapic/speech/v1/speech_client_config.json new file mode 100644 index 000000000000..4edd15ce865b --- /dev/null +++ b/speech/google/cloud/gapic/speech/v1/speech_client_config.json @@ -0,0 +1,43 @@ +{ + "interfaces": { + "google.cloud.speech.v1.Speech": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 190000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 190000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "Recognize": { + "timeout_millis": 190000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "LongRunningRecognize": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "StreamingRecognize": { + "timeout_millis": 190000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/speech/google/cloud/proto/__init__.py b/speech/google/cloud/proto/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/speech/google/cloud/proto/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/speech/google/cloud/proto/speech/__init__.py b/speech/google/cloud/proto/speech/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/speech/google/cloud/proto/speech/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/speech/google/cloud/proto/speech/v1/__init__.py b/speech/google/cloud/proto/speech/v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/speech/google/cloud/proto/speech/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/speech/google/cloud/proto/speech/v1/cloud_speech_pb2.py b/speech/google/cloud/proto/speech/v1/cloud_speech_pb2.py new file mode 100644 index 000000000000..29d73064b556 --- /dev/null +++ b/speech/google/cloud/proto/speech/v1/cloud_speech_pb2.py @@ -0,0 +1,1331 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/proto/speech/v1/cloud_speech.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/speech/v1/cloud_speech.proto', + package='google.cloud.speech.v1', + syntax='proto3', + serialized_pb=_b('\n/google/cloud/proto/speech/v1/cloud_speech.proto\x12\x16google.cloud.speech.v1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\x86\x01\n\x10RecognizeRequest\x12\x39\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfig\x12\x37\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudio\"\x91\x01\n\x1bLongRunningRecognizeRequest\x12\x39\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfig\x12\x37\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudio\"\x99\x01\n\x19StreamingRecognizeRequest\x12N\n\x10streaming_config\x18\x01 \x01(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionConfigH\x00\x12\x17\n\raudio_content\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request\"\x8a\x01\n\x1aStreamingRecognitionConfig\x12\x39\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfig\x12\x18\n\x10single_utterance\x18\x02 \x01(\x08\x12\x17\n\x0finterim_results\x18\x03 \x01(\x08\"\x92\x03\n\x11RecognitionConfig\x12I\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32\x37.google.cloud.speech.v1.RecognitionConfig.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x15\n\rlanguage_code\x18\x03 \x01(\t\x12\x18\n\x10max_alternatives\x18\x04 \x01(\x05\x12\x18\n\x10profanity_filter\x18\x05 \x01(\x08\x12>\n\x0fspeech_contexts\x18\x06 \x03(\x0b\x32%.google.cloud.speech.v1.SpeechContext\"\x8b\x01\n\rAudioEncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\x12\t\n\x05MULAW\x10\x03\x12\x07\n\x03\x41MR\x10\x04\x12\n\n\x06\x41MR_WB\x10\x05\x12\x0c\n\x08OGG_OPUS\x10\x06\x12\x1a\n\x16SPEEX_WITH_HEADER_BYTE\x10\x07\" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t\"D\n\x10RecognitionAudio\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x42\x0e\n\x0c\x61udio_source\"U\n\x11RecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult\"`\n\x1cLongRunningRecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult\"\x9e\x01\n\x1cLongRunningRecognizeMetadata\x12\x18\n\x10progress_percent\x18\x01 
\x01(\x05\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xb1\x02\n\x1aStreamingRecognizeResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x43\n\x07results\x18\x02 \x03(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionResult\x12]\n\x11speech_event_type\x18\x04 \x01(\x0e\x32\x42.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType\"L\n\x0fSpeechEventType\x12\x1c\n\x18SPEECH_EVENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x01\"\x8d\x01\n\x1aStreamingRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x10\n\x08is_final\x18\x02 \x01(\x08\x12\x11\n\tstability\x18\x03 \x01(\x02\"e\n\x17SpeechRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\"F\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x32\xa6\x03\n\x06Speech\x12\x81\x01\n\tRecognize\x12(.google.cloud.speech.v1.RecognizeRequest\x1a).google.cloud.speech.v1.RecognizeResponse\"\x1f\x82\xd3\xe4\x93\x02\x19\"\x14/v1/speech:recognize:\x01*\x12\x96\x01\n\x14LongRunningRecognize\x12\x33.google.cloud.speech.v1.LongRunningRecognizeRequest\x1a\x1d.google.longrunning.Operation\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1/speech:longrunningrecognize:\x01*\x12\x7f\n\x12StreamingRecognize\x12\x31.google.cloud.speech.v1.StreamingRecognizeRequest\x1a\x32.google.cloud.speech.v1.StreamingRecognizeResponse(\x01\x30\x01\x42i\n\x1a\x63om.google.cloud.speech.v1B\x0bSpeechProtoP\x01Z`__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognizeRequest) + )) +_sym_db.RegisterMessage(StreamingRecognizeRequest) + +StreamingRecognitionConfig = _reflection.GeneratedProtocolMessageType('StreamingRecognitionConfig', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGRECOGNITIONCONFIG, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Provides information to the recognizer that specifies how to process the + request. + + + Attributes: + config: + *Required* Provides information to the recognizer that + specifies how to process the request. + single_utterance: + *Optional* If ``false`` or omitted, the recognizer will + perform continuous recognition (continuing to wait for and + process audio even if the user pauses speaking) until the + client closes the input stream (gRPC API) or until the maximum + time limit has been reached. May return multiple + ``StreamingRecognitionResult``\ s with the ``is_final`` flag + set to ``true``. If ``true``, the recognizer will detect a + single spoken utterance. When it detects that the user has + paused or stopped speaking, it will return an + ``END_OF_SINGLE_UTTERANCE`` event and cease recognition. It + will return no more than one ``StreamingRecognitionResult`` + with the ``is_final`` flag set to ``true``. + interim_results: + *Optional* If ``true``, interim results (tentative hypotheses) + may be returned as they become available (these interim + results are indicated with the ``is_final=false`` flag). If + ``false`` or omitted, only ``is_final=true`` result(s) are + returned. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognitionConfig) + )) +_sym_db.RegisterMessage(StreamingRecognitionConfig) + +RecognitionConfig = _reflection.GeneratedProtocolMessageType('RecognitionConfig', (_message.Message,), dict( + DESCRIPTOR = _RECOGNITIONCONFIG, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Provides information to the recognizer that specifies how to process the + request. + + + Attributes: + encoding: + *Required* Encoding of audio data sent in all + ``RecognitionAudio`` messages. + sample_rate_hertz: + *Required* Sample rate in Hertz of the audio data sent in all + ``RecognitionAudio`` messages. Valid values are: 8000-48000. + 16000 is optimal. For best results, set the sampling rate of + the audio source to 16000 Hz. If that's not possible, use the + native sample rate of the audio source (instead of re- + sampling). + language_code: + *Required* The language of the supplied audio as a `BCP-47 + `__ language + tag. Example: "en-US". See `Language Support + `__ for a list + of the currently supported language codes. + max_alternatives: + *Optional* Maximum number of recognition hypotheses to be + returned. Specifically, the maximum number of + ``SpeechRecognitionAlternative`` messages within each + ``SpeechRecognitionResult``. The server may return fewer than + ``max_alternatives``. Valid values are ``0``-``30``. A value + of ``0`` or ``1`` will return a maximum of one. If omitted, + will return a maximum of one. + profanity_filter: + *Optional* If set to ``true``, the server will attempt to + filter out profanities, replacing all but the initial + character in each filtered word with asterisks, e.g. + "f\*\*\*". If set to ``false`` or omitted, profanities won't + be filtered out. + speech_contexts: + *Optional* A means to provide context to assist the speech + recognition. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognitionConfig) + )) +_sym_db.RegisterMessage(RecognitionConfig) + +SpeechContext = _reflection.GeneratedProtocolMessageType('SpeechContext', (_message.Message,), dict( + DESCRIPTOR = _SPEECHCONTEXT, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Provides "hints" to the speech recognizer to favor specific words and + phrases in the results. + + + Attributes: + phrases: + *Optional* A list of strings containing words and phrases + "hints" so that the speech recognition is more likely to + recognize them. This can be used to improve the accuracy for + specific words and phrases, for example, if specific commands + are typically spoken by the user. This can also be used to add + additional words to the vocabulary of the recognizer. See + `usage limits + `__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeechContext) + )) +_sym_db.RegisterMessage(SpeechContext) + +RecognitionAudio = _reflection.GeneratedProtocolMessageType('RecognitionAudio', (_message.Message,), dict( + DESCRIPTOR = _RECOGNITIONAUDIO, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Contains audio data in the encoding specified in the + ``RecognitionConfig``. Either ``content`` or ``uri`` must be supplied. + Supplying both or neither returns [google.rpc.Code.INVALID\_ARGUMENT][]. + See `audio limits `__. + + + Attributes: + content: + The audio data bytes encoded as specified in + ``RecognitionConfig``. 
Note: as with all bytes fields, + protobuffers use a pure binary representation, whereas JSON + representations use base64. + uri: + URI that points to a file that contains audio data bytes as + specified in ``RecognitionConfig``. Currently, only Google + Cloud Storage URIs are supported, which must be specified in + the following format: ``gs://bucket_name/object_name`` (other + URI formats return [google.rpc.Code.INVALID\_ARGUMENT][]). For + more information, see `Request URIs + `__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognitionAudio) + )) +_sym_db.RegisterMessage(RecognitionAudio) + +RecognizeResponse = _reflection.GeneratedProtocolMessageType('RecognizeResponse', (_message.Message,), dict( + DESCRIPTOR = _RECOGNIZERESPONSE, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """The only message returned to the client by the ``Recognize`` method. It + contains the result as zero or more sequential + ``SpeechRecognitionResult`` messages. + + + Attributes: + results: + *Output-only* Sequential list of transcription results + corresponding to sequential portions of audio. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognizeResponse) + )) +_sym_db.RegisterMessage(RecognizeResponse) + +LongRunningRecognizeResponse = _reflection.GeneratedProtocolMessageType('LongRunningRecognizeResponse', (_message.Message,), dict( + DESCRIPTOR = _LONGRUNNINGRECOGNIZERESPONSE, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """The only message returned to the client by the ``LongRunningRecognize`` + method. It contains the result as zero or more sequential + ``SpeechRecognitionResult`` messages. It is included in the + ``result.response`` field of the ``Operation`` returned by the + ``GetOperation`` call of the ``google::longrunning::Operations`` + service. + + + Attributes: + results: + *Output-only* Sequential list of transcription results + corresponding to sequential portions of audio. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.LongRunningRecognizeResponse) + )) +_sym_db.RegisterMessage(LongRunningRecognizeResponse) + +LongRunningRecognizeMetadata = _reflection.GeneratedProtocolMessageType('LongRunningRecognizeMetadata', (_message.Message,), dict( + DESCRIPTOR = _LONGRUNNINGRECOGNIZEMETADATA, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Describes the progress of a long-running ``LongRunningRecognize`` call. + It is included in the ``metadata`` field of the ``Operation`` returned + by the ``GetOperation`` call of the ``google::longrunning::Operations`` + service. + + + Attributes: + progress_percent: + Approximate percentage of audio processed thus far. Guaranteed + to be 100 when the audio is fully processed and the results + are available. + start_time: + Time when the request was received. + last_update_time: + Time of the most recent processing update. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.LongRunningRecognizeMetadata) + )) +_sym_db.RegisterMessage(LongRunningRecognizeMetadata) + +StreamingRecognizeResponse = _reflection.GeneratedProtocolMessageType('StreamingRecognizeResponse', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGRECOGNIZERESPONSE, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """``StreamingRecognizeResponse`` is the only message returned to the + client by ``StreamingRecognize``. 
A series of one or more + ``StreamingRecognizeResponse`` messages are streamed back to the client. + + Here's an example of a series of ten ``StreamingRecognizeResponse``\ s + that might be returned while processing audio: + + 1. results { alternatives { transcript: "tube" } stability: 0.01 } + + 2. results { alternatives { transcript: "to be a" } stability: 0.01 } + + 3. results { alternatives { transcript: "to be" } stability: 0.9 } + results { alternatives { transcript: " or not to be" } stability: + 0.01 } + + 4. results { alternatives { transcript: "to be or not to be" confidence: + 0.92 } alternatives { transcript: "to bee or not to bee" } is\_final: + true } + + 5. results { alternatives { transcript: " that's" } stability: 0.01 } + + 6. results { alternatives { transcript: " that is" } stability: 0.9 } + results { alternatives { transcript: " the question" } stability: + 0.01 } + + 7. speech\_event\_type: END\_OF\_SINGLE\_UTTERANCE + + 8. results { alternatives { transcript: " that is the question" + confidence: 0.98 } alternatives { transcript: " that was the + question" } is\_final: true } + + Notes: + + - Only two of the above responses #4 and #8 contain final results; they + are indicated by ``is_final: true``. Concatenating these together + generates the full transcript: "to be or not to be that is the + question". + + - The others contain interim ``results``. #3 and #6 contain two interim + ``results``: the first portion has a high stability and is less + likely to change; the second portion has a low stability and is very + likely to change. A UI designer might choose to show only high + stability ``results``. + + - The specific ``stability`` and ``confidence`` values shown above are + only for illustrative purposes. Actual values may vary. + + - In each response, only one of these fields will be set: ``error``, + ``speech_event_type``, or one or more (repeated) ``results``. + + + + + Attributes: + error: + *Output-only* If set, returns a [google.rpc.Status][] message + that specifies the error for the operation. + results: + *Output-only* This repeated list contains zero or more results + that correspond to consecutive portions of the audio currently + being processed. It contains zero or one ``is_final=true`` + result (the newly settled portion), followed by zero or more + ``is_final=false`` results. + speech_event_type: + *Output-only* Indicates the type of speech event. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognizeResponse) + )) +_sym_db.RegisterMessage(StreamingRecognizeResponse) + +StreamingRecognitionResult = _reflection.GeneratedProtocolMessageType('StreamingRecognitionResult', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGRECOGNITIONRESULT, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """A streaming speech recognition result corresponding to a portion of the + audio that is currently being processed. + + + Attributes: + alternatives: + *Output-only* May contain one or more recognition hypotheses + (up to the maximum specified in ``max_alternatives``). + is_final: + *Output-only* If ``false``, this + ``StreamingRecognitionResult`` represents an interim result + that may change. If ``true``, this is the final time the + speech service will return this particular + ``StreamingRecognitionResult``, the recognizer will not return + any further hypotheses for this portion of the transcript and + corresponding audio. 
+ stability: + *Output-only* An estimate of the likelihood that the + recognizer will not change its guess about this interim + result. Values range from 0.0 (completely unstable) to 1.0 + (completely stable). This field is only provided for interim + results (``is_final=false``). The default of 0.0 is a sentinel + value indicating ``stability`` was not set. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognitionResult) + )) +_sym_db.RegisterMessage(StreamingRecognitionResult) + +SpeechRecognitionResult = _reflection.GeneratedProtocolMessageType('SpeechRecognitionResult', (_message.Message,), dict( + DESCRIPTOR = _SPEECHRECOGNITIONRESULT, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """A speech recognition result corresponding to a portion of the audio. + + + Attributes: + alternatives: + *Output-only* May contain one or more recognition hypotheses + (up to the maximum specified in ``max_alternatives``). + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeechRecognitionResult) + )) +_sym_db.RegisterMessage(SpeechRecognitionResult) + +SpeechRecognitionAlternative = _reflection.GeneratedProtocolMessageType('SpeechRecognitionAlternative', (_message.Message,), dict( + DESCRIPTOR = _SPEECHRECOGNITIONALTERNATIVE, + __module__ = 'google.cloud.proto.speech.v1.cloud_speech_pb2' + , + __doc__ = """Alternative hypotheses (a.k.a. n-best list). + + + Attributes: + transcript: + *Output-only* Transcript text representing the words that the + user spoke. + confidence: + *Output-only* The confidence estimate between 0.0 and 1.0. A + higher number indicates an estimated greater likelihood that + the recognized words are correct. This field is typically + provided only for the top hypothesis, and only for + ``is_final=true`` results. Clients should not rely on the + ``confidence`` field as it is not guaranteed to be accurate, + or even set, in any of the results. The default of 0.0 is a + sentinel value indicating ``confidence`` was not set. + """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeechRecognitionAlternative) + )) +_sym_db.RegisterMessage(SpeechRecognitionAlternative) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\032com.google.cloud.speech.v1B\013SpeechProtoP\001Z=0.15.0.""" + """Service that implements Google Cloud Speech API. + """ + def Recognize(self, request, context): + """Performs synchronous speech recognition: receive results after all audio + has been sent and processed. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def LongRunningRecognize(self, request, context): + """Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + `Operation.error` or an `Operation.response` which contains + a `LongRunningRecognizeResponse` message. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def StreamingRecognize(self, request_iterator, context): + """Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaSpeechStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Service that implements Google Cloud Speech API. + """ + def Recognize(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Performs synchronous speech recognition: receive results after all audio + has been sent and processed. + """ + raise NotImplementedError() + Recognize.future = None + def LongRunningRecognize(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + `Operation.error` or an `Operation.response` which contains + a `LongRunningRecognizeResponse` message. + """ + raise NotImplementedError() + LongRunningRecognize.future = None + def StreamingRecognize(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). + """ + raise NotImplementedError() + + + def beta_create_Speech_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): LongRunningRecognizeRequest.FromString, + ('google.cloud.speech.v1.Speech', 'Recognize'): RecognizeRequest.FromString, + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): StreamingRecognizeRequest.FromString, + } + response_serializers = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ('google.cloud.speech.v1.Speech', 'Recognize'): RecognizeResponse.SerializeToString, + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): StreamingRecognizeResponse.SerializeToString, + } + method_implementations = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): face_utilities.unary_unary_inline(servicer.LongRunningRecognize), + ('google.cloud.speech.v1.Speech', 'Recognize'): face_utilities.unary_unary_inline(servicer.Recognize), + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): face_utilities.stream_stream_inline(servicer.StreamingRecognize), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Speech_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): LongRunningRecognizeRequest.SerializeToString, + ('google.cloud.speech.v1.Speech', 'Recognize'): RecognizeRequest.SerializeToString, + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): StreamingRecognizeRequest.SerializeToString, + } + response_deserializers = { + ('google.cloud.speech.v1.Speech', 'LongRunningRecognize'): google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ('google.cloud.speech.v1.Speech', 'Recognize'): RecognizeResponse.FromString, + ('google.cloud.speech.v1.Speech', 'StreamingRecognize'): StreamingRecognizeResponse.FromString, + } + cardinalities = { + 'LongRunningRecognize': cardinality.Cardinality.UNARY_UNARY, + 'Recognize': cardinality.Cardinality.UNARY_UNARY, + 'StreamingRecognize': cardinality.Cardinality.STREAM_STREAM, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.cloud.speech.v1.Speech', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/speech/google/cloud/proto/speech/v1/cloud_speech_pb2_grpc.py b/speech/google/cloud/proto/speech/v1/cloud_speech_pb2_grpc.py new file mode 100644 index 000000000000..730f8443a3bd --- /dev/null +++ b/speech/google/cloud/proto/speech/v1/cloud_speech_pb2_grpc.py @@ -0,0 +1,86 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.speech.v1.cloud_speech_pb2 as google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2 +import google.longrunning.operations_pb2 as google_dot_longrunning_dot_operations__pb2 + + +class SpeechStub(object): + """Service that implements Google Cloud Speech API. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Recognize = channel.unary_unary( + '/google.cloud.speech.v1.Speech/Recognize', + request_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.RecognizeRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.RecognizeResponse.FromString, + ) + self.LongRunningRecognize = channel.unary_unary( + '/google.cloud.speech.v1.Speech/LongRunningRecognize', + request_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.LongRunningRecognizeRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.StreamingRecognize = channel.stream_stream( + '/google.cloud.speech.v1.Speech/StreamingRecognize', + request_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.StreamingRecognizeRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.StreamingRecognizeResponse.FromString, + ) + + +class SpeechServicer(object): + """Service that implements Google Cloud Speech API. + """ + + def Recognize(self, request, context): + """Performs synchronous speech recognition: receive results after all audio + has been sent and processed. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LongRunningRecognize(self, request, context): + """Performs asynchronous speech recognition: receive results via the + google.longrunning.Operations interface. Returns either an + `Operation.error` or an `Operation.response` which contains + a `LongRunningRecognizeResponse` message. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingRecognize(self, request_iterator, context): + """Performs bidirectional streaming speech recognition: receive results while + sending audio. This method is only available via the gRPC API (not REST). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SpeechServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Recognize': grpc.unary_unary_rpc_method_handler( + servicer.Recognize, + request_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.RecognizeRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.RecognizeResponse.SerializeToString, + ), + 'LongRunningRecognize': grpc.unary_unary_rpc_method_handler( + servicer.LongRunningRecognize, + request_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.LongRunningRecognizeRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'StreamingRecognize': grpc.stream_stream_rpc_method_handler( + servicer.StreamingRecognize, + request_deserializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.StreamingRecognizeRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_speech_dot_v1_dot_cloud__speech__pb2.StreamingRecognizeResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.speech.v1.Speech', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/speech/google/cloud/speech/__init__.py b/speech/google/cloud/speech/__init__.py index 9c1654a2a6c7..1035b45c1d0d 100644 --- a/speech/google/cloud/speech/__init__.py +++ b/speech/google/cloud/speech/__init__.py @@ -23,5 +23,23 @@ from google.cloud.speech.encoding import Encoding from google.cloud.speech.operation import Operation +from google.cloud.speech_v1 import enums +from google.cloud.speech_v1 import SpeechClient +from google.cloud.speech_v1 import types -__all__ = ['__version__', 'Alternative', 'Client', 'Encoding', 'Operation'] + +__all__ = ( + # Common + '__version__', + + # Deprecated Manual Layer + 'Alternative', + 'Client', + 'Encoding', + 'Operation', + + # GAPIC & Partial Manual Layer + 'enums', + 'SpeechClient', + 'types', +) diff --git a/speech/google/cloud/speech/_gax.py b/speech/google/cloud/speech/_gax.py index c03c08540214..48d063bfaa8e 100644 --- a/speech/google/cloud/speech/_gax.py +++ b/speech/google/cloud/speech/_gax.py @@ -26,8 +26,7 @@ StreamingRecognizeRequest) from google.longrunning import operations_grpc -from google.cloud._helpers import make_secure_channel -from google.cloud._helpers import make_secure_stub +from google.cloud import _helpers from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.speech import 
__version__ @@ -46,7 +45,7 @@ class GAPICSpeechAPI(object): def __init__(self, client=None): self._client = client credentials = self._client._credentials - channel = make_secure_channel( + channel = _helpers.make_secure_channel( credentials, DEFAULT_USER_AGENT, SpeechClient.SERVICE_ADDRESS) self._gapic_api = SpeechClient( @@ -54,7 +53,7 @@ def __init__(self, client=None): lib_name='gccl', lib_version=__version__, ) - self._operations_stub = make_secure_stub( + self._operations_stub = _helpers.make_secure_stub( credentials, DEFAULT_USER_AGENT, operations_grpc.OperationsStub, diff --git a/speech/google/cloud/speech/client.py b/speech/google/cloud/speech/client.py index f9eb211c4a80..7c066d48cb9d 100644 --- a/speech/google/cloud/speech/client.py +++ b/speech/google/cloud/speech/client.py @@ -14,7 +14,10 @@ """Basic client for Google Cloud Speech API.""" +from __future__ import absolute_import + import os +import warnings from google.cloud.client import Client as BaseClient from google.cloud.environment_vars import DISABLE_GRPC @@ -60,6 +63,14 @@ class Client(BaseClient): _speech_api = None def __init__(self, credentials=None, _http=None, _use_grpc=None): + warnings.warn( + 'This client class and objects that derive from it have been ' + 'deprecated. Use `google.cloud.speech.SpeechClient` ' + '(provided by this package) instead. This client will be removed ' + 'in a future release.', + DeprecationWarning, + ) + super(Client, self).__init__(credentials=credentials, _http=_http) # Save on the actual client class whether we use GAX or not. if _use_grpc is None: diff --git a/speech/google/cloud/speech_v1/__init__.py b/speech/google/cloud/speech_v1/__init__.py new file mode 100644 index 000000000000..be9c3772b4a6 --- /dev/null +++ b/speech/google/cloud/speech_v1/__init__.py @@ -0,0 +1,34 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.gapic.speech.v1 import speech_client +from google.cloud.gapic.speech.v1 import enums + +from google.cloud.speech_v1.helpers import SpeechHelpers +from google.cloud.speech_v1 import types + + +class SpeechClient(SpeechHelpers, speech_client.SpeechClient): + __doc__ = speech_client.SpeechClient.__doc__ + enums = enums + types = types + + +__all__ = ( + 'enums', + 'SpeechClient', + 'types', +) diff --git a/speech/google/cloud/speech_v1/helpers.py b/speech/google/cloud/speech_v1/helpers.py new file mode 100644 index 000000000000..8ecddc2738f1 --- /dev/null +++ b/speech/google/cloud/speech_v1/helpers.py @@ -0,0 +1,88 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + + +class SpeechHelpers(object): + """A set of convenience methods to make the Speech client easier to use. + + This class should be considered abstract; it is used as a superclass + in a multiple-inheritance construction alongside the applicable GAPIC. + See the :class:`~google.cloud.speech_v1.SpeechClient`. + """ + def streaming_recognize(self, config, requests, options=None): + """Perform bi-directional speech recognition. + + This method allows you to receive results while sending audio; + it is only available via gRPC (not REST). + + .. warning:: + + This method is EXPERIMENTAL. Its interface might change in the + future. + + Example: + >>> from google.cloud.speech_v1 import enums + >>> from google.cloud.speech_v1 import SpeechClient + >>> from google.cloud.speech_v1 import types + >>> client = SpeechClient() + >>> config = types.StreamingRecognitionConfig( + ... config=types.RecognitionConfig( + ... encoding=enums.RecognitionConfig.AudioEncoding.FLAC, + ... ), + ... ) + >>> request = types.StreamingRecognizeRequest(audio_content=b'...') + >>> requests = [request] + >>> for element in client.streaming_recognize(config, requests): + ... # process element + ... pass + + Args: + config (:class:`~.types.StreamingRecognitionConfig`): The + configuration to use for the stream. + requests (Iterable[:class:`~.types.StreamingRecognizeRequest`]): + The input objects. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g., timeout, retries, etc. + + Returns: + Iterable[:class:`~.types.StreamingRecognizeResponse`] + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + return self._streaming_recognize( + self._streaming_request_iterable(config, requests), + options, + ) + + def _streaming_request_iterable(self, config, requests): + """A generator that yields the config followed by the requests. + + Args: + config (~.speech_v1.types.StreamingRecognitionConfig): The + configuration to use for the stream. + requests (Iterable[~.speech_v1.types.StreamingRecognizeRequest]): + The input objects. + + Returns: + Iterable[~.speech_v1.types.StreamingRecognizeRequest]: The + correctly formatted input for + :meth:`~.speech_v1.SpeechClient.streaming_recognize`. + """ + yield self.types.StreamingRecognizeRequest(streaming_config=config) + for request in requests: + yield request diff --git a/speech/google/cloud/speech_v1/types.py b/speech/google/cloud/speech_v1/types.py new file mode 100644 index 000000000000..75ec9a5d2b59 --- /dev/null +++ b/speech/google/cloud/speech_v1/types.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.cloud.proto.speech.v1 import cloud_speech_pb2 + +from google.gax.utils.messages import get_messages + + +names = [] +for name, message in get_messages(cloud_speech_pb2).items(): + message.__module__ = 'google.cloud.speech_v1.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/speech/nox.py b/speech/nox.py index fdda2298bc43..ee174668d021 100644 --- a/speech/nox.py +++ b/speech/nox.py @@ -38,10 +38,16 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', - '--cov=google.cloud.speech', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', '--quiet', + '--cov=google.cloud.speech', + '--cov=google.cloud.speech_v1', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=0', + os.path.join('tests', 'unit'), ) diff --git a/speech/setup.py b/speech/setup.py index 7c208dffdd88..7bd990e2be3d 100644 --- a/speech/setup.py +++ b/speech/setup.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import io import os from setuptools import find_packages @@ -20,6 +21,7 @@ PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: README = file_obj.read() @@ -51,20 +53,44 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.25.0, < 0.26dev', - 'grpcio >= 1.0.2, < 2.0dev', - 'gapic-google-cloud-speech-v1 >= 0.15.3, < 0.16dev', + 'google-gax >= 0.15.13, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', ] setup( + author='Google Cloud Platform', + author_email='googleapis-packages@google.com', name='google-cloud-speech', - version='0.26.0', + version='0.27.0', description='Python Client for Google Cloud Speech', long_description=README, namespace_packages=[ 'google', 'google.cloud', + 'google.cloud.gapic', + 'google.cloud.gapic.speech', + 'google.cloud.proto', + 'google.cloud.proto.speech', ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, - **SETUP_BASE + url='https://github.com/GoogleCloudPlatform/google-cloud-python', + license='Apache 2.0', + platforms='Posix; MacOS X; Windows', + include_package_data=True, + zip_safe=False, + scripts=[], + classifiers=[ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Topic :: Internet', + ], ) diff --git a/speech/tests/gapic/test_speech_client_v1.py b/speech/tests/gapic/test_speech_client_v1.py new file mode 100644 index 000000000000..acd196adde68
--- /dev/null +++ b/speech/tests/gapic/test_speech_client_v1.py @@ -0,0 +1,212 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Unit tests.""" + +import mock +import unittest + +from google.gax import errors +from google.rpc import status_pb2 + +from google.cloud.gapic.speech.v1 import enums +from google.cloud.gapic.speech.v1 import speech_client +from google.cloud.proto.speech.v1 import cloud_speech_pb2 +from google.longrunning import operations_pb2 + + +class CustomException(Exception): + pass + + +class TestSpeechClient(unittest.TestCase): + @mock.patch('google.gax.config.create_stub', spec=True) + def test_recognize(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + encoding = enums.RecognitionConfig.AudioEncoding.FLAC + sample_rate_hertz = 44100 + language_code = 'en-US' + config = cloud_speech_pb2.RecognitionConfig( + encoding=encoding, + sample_rate_hertz=sample_rate_hertz, + language_code=language_code) + uri = 'gs://bucket_name/file_name.flac' + audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + + # Mock response + expected_response = cloud_speech_pb2.RecognizeResponse() + grpc_stub.Recognize.return_value = expected_response + + response = client.recognize(config, audio) + self.assertEqual(expected_response, response) + + grpc_stub.Recognize.assert_called_once() + args, kwargs = grpc_stub.Recognize.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = cloud_speech_pb2.RecognizeRequest( + config=config, audio=audio) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_recognize_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + encoding = enums.RecognitionConfig.AudioEncoding.FLAC + sample_rate_hertz = 44100 + language_code = 'en-US' + config = cloud_speech_pb2.RecognitionConfig( + encoding=encoding, + sample_rate_hertz=sample_rate_hertz, + language_code=language_code) + uri = 'gs://bucket_name/file_name.flac' + audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + + # Mock exception response + grpc_stub.Recognize.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.recognize, config, audio) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_long_running_recognize(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + encoding = enums.RecognitionConfig.AudioEncoding.FLAC + sample_rate_hertz = 44100 + language_code = 'en-US' + config = 
cloud_speech_pb2.RecognitionConfig( + encoding=encoding, + sample_rate_hertz=sample_rate_hertz, + language_code=language_code) + uri = 'gs://bucket_name/file_name.flac' + audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + + # Mock response + expected_response = cloud_speech_pb2.LongRunningRecognizeResponse() + operation = operations_pb2.Operation( + name='operations/test_long_running_recognize', done=True) + operation.response.Pack(expected_response) + grpc_stub.LongRunningRecognize.return_value = operation + + response = client.long_running_recognize(config, audio) + self.assertEqual(expected_response, response.result()) + + grpc_stub.LongRunningRecognize.assert_called_once() + args, kwargs = grpc_stub.LongRunningRecognize.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = cloud_speech_pb2.LongRunningRecognizeRequest( + config=config, audio=audio) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_long_running_recognize_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + encoding = enums.RecognitionConfig.AudioEncoding.FLAC + sample_rate_hertz = 44100 + language_code = 'en-US' + config = cloud_speech_pb2.RecognitionConfig( + encoding=encoding, + sample_rate_hertz=sample_rate_hertz, + language_code=language_code) + uri = 'gs://bucket_name/file_name.flac' + audio = cloud_speech_pb2.RecognitionAudio(uri=uri) + + # Mock exception response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name='operations/test_long_running_recognize_exception', done=True) + operation.error.CopyFrom(error) + grpc_stub.LongRunningRecognize.return_value = operation + + response = client.long_running_recognize(config, audio) + self.assertEqual(error, response.exception()) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_streaming_recognize(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + request = cloud_speech_pb2.StreamingRecognizeRequest() + requests = [request] + + # Mock response + expected_response = cloud_speech_pb2.StreamingRecognizeResponse() + grpc_stub.StreamingRecognize.return_value = iter([expected_response]) + + response = client.streaming_recognize(requests) + resources = list(response) + self.assertEqual(1, len(resources)) + self.assertEqual(expected_response, resources[0]) + + grpc_stub.StreamingRecognize.assert_called_once() + args, kwargs = grpc_stub.StreamingRecognize.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_requests = args[0] + self.assertEqual(1, len(actual_requests)) + actual_request = list(actual_requests)[0] + self.assertEqual(request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_streaming_recognize_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = speech_client.SpeechClient() + + # Mock request + request = cloud_speech_pb2.StreamingRecognizeRequest() + requests = [request] + + # Mock exception response + 
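# (CustomException is patched into google.gax.config.API_ERRORS above, + # so the google-gax layer re-raises it as errors.GaxError.) +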
grpc_stub.StreamingRecognize.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.streaming_recognize, + requests) diff --git a/speech/tests/system.py b/speech/tests/system.py index 0c4acfb52767..35c1ee3d1521 100644 --- a/speech/tests/system.py +++ b/speech/tests/system.py @@ -16,6 +16,8 @@ import time import unittest +import six + from google.cloud import exceptions from google.cloud import speech from google.cloud import storage @@ -158,11 +160,11 @@ def test_sync_recognize_local_file(self): content = file_obj.read() results = self._make_sync_request(content=content, - max_alternatives=2) + max_alternatives=1) self.assertEqual(len(results), 1) alternatives = results[0].alternatives - self.assertEqual(len(alternatives), 2) - self._check_results(alternatives, 2) + self.assertEqual(len(alternatives), 1) + self._check_results(alternatives, 1) def test_sync_recognize_gcs_file(self): bucket_name = Config.TEST_BUCKET.name @@ -183,12 +185,12 @@ def test_async_recognize_local_file(self): content = file_obj.read() operation = self._make_async_request(content=content, - max_alternatives=2) + max_alternatives=1) _wait_until_complete(operation) self.assertEqual(len(operation.results), 1) alternatives = operation.results[0].alternatives - self.assertEqual(len(alternatives), 2) - self._check_results(alternatives, 2) + self.assertEqual(len(alternatives), 1) + self._check_results(alternatives, 1) def test_async_recognize_gcs_file(self): bucket_name = Config.TEST_BUCKET.name @@ -200,13 +202,13 @@ def test_async_recognize_gcs_file(self): source_uri = 'gs://%s/%s' % (bucket_name, blob_name) operation = self._make_async_request(source_uri=source_uri, - max_alternatives=2) + max_alternatives=1) _wait_until_complete(operation) self.assertEqual(len(operation.results), 1) alternatives = operation.results[0].alternatives - self.assertEqual(len(alternatives), 2) - self._check_results(alternatives, 2) + self.assertEqual(len(alternatives), 1) + self._check_results(alternatives, 1) def test_stream_recognize(self): if not Config.USE_GRPC: @@ -220,18 +222,17 @@ def test_stream_recognize_interim_results(self): if not Config.USE_GRPC: self.skipTest('gRPC is required for Speech Streaming Recognize.') - # These extra words are interim_results that the API returns as it's - # deciphering the speech audio. This has a high probability of becoming - # out of date and causing the test to fail. - extras = ' Google Now who hello thank you for you for use hello ' + # Just test that the interim results exist; the exact value can and + # does change, so writing a test for it is difficult.
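+ # (Interim results are the partial, not-yet-final hypotheses the API + # emits while audio is still streaming; only their type is checked here.)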
with open(AUDIO_FILE, 'rb') as file_obj: recognize = self._make_streaming_request(file_obj, interim_results=True) responses = list(recognize) for response in responses: - if response.alternatives[0].transcript: - self.assertIn(response.alternatives[0].transcript, - extras + self.ASSERT_TEXT) + self.assertIsInstance( + response.alternatives[0].transcript, + six.text_type, + ) self.assertGreater(len(responses), 5) self._check_results(responses[-1].alternatives) diff --git a/speech/tests/unit/test__gax.py b/speech/tests/unit/test__gax.py index 7cf44ba58f6e..4587f3b6d6a5 100644 --- a/speech/tests/unit/test__gax.py +++ b/speech/tests/unit/test__gax.py @@ -34,18 +34,17 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) + @mock.patch('google.cloud._helpers.make_secure_channel', + return_value=mock.sentinel.channel) @mock.patch( - 'google.cloud._helpers.make_secure_channel', - return_value=mock.sentinel.channel) - @mock.patch( - 'google.cloud.gapic.speech.v1.speech_client.SpeechClient', - SERVICE_ADDRESS='hey.you.guys') - @mock.patch( - 'google.cloud._helpers.make_secure_stub', - return_value=mock.sentinel.stub) - def test_constructor(self, mocked_stub, mocked_cls, mocked_channel): + 'google.cloud.gapic.speech.v1.speech_client.SpeechClient.__init__', + return_value=None) + @mock.patch('google.cloud._helpers.make_secure_stub', + return_value=mock.sentinel.stub) + def test_constructor(self, mocked_stub, mocked_init, mocked_channel): from google.longrunning import operations_grpc from google.cloud._http import DEFAULT_USER_AGENT + from google.cloud.gapic.speech.v1.speech_client import SpeechClient from google.cloud.speech import __version__ from google.cloud.speech._gax import OPERATIONS_API_HOST @@ -57,17 +56,17 @@ def test_constructor(self, mocked_stub, mocked_cls, mocked_channel): speech_api = self._make_one(mock_client) self.assertIs(speech_api._client, mock_client) - self.assertIs(speech_api._gapic_api, mocked_cls.return_value) + self.assertIsInstance(speech_api._gapic_api, SpeechClient) mocked_stub.assert_called_once_with( mock_cnxn.credentials, DEFAULT_USER_AGENT, operations_grpc.OperationsStub, OPERATIONS_API_HOST) - mocked_cls.assert_called_once_with( + mocked_init.assert_called_once_with( channel=mock.sentinel.channel, lib_name='gccl', lib_version=__version__) mocked_channel.assert_called_once_with( mock_cnxn.credentials, DEFAULT_USER_AGENT, - mocked_cls.SERVICE_ADDRESS) + 'speech.googleapis.com') class TestSpeechGAXMakeRequests(unittest.TestCase): diff --git a/speech/tests/unit/test_client.py b/speech/tests/unit/test_client.py index ef3ea2dc84e6..259df66b0a3d 100644 --- a/speech/tests/unit/test_client.py +++ b/speech/tests/unit/test_client.py @@ -246,6 +246,7 @@ def test_sync_recognize_with_empty_results_no_gax(self): next(sample.recognize(language_code='en-US')) def test_sync_recognize_with_empty_results_gax(self): + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud import speech @@ -255,13 +256,6 @@ def test_sync_recognize_with_empty_results_gax(self): client = self._make_one(credentials=credentials, _use_grpc=True) client._credentials = credentials - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( response=_make_sync_response(), channel=channel, **kwargs) @@ -269,16 +263,19 @@ def speech_api(channel=None, **kwargs): host = 'foo.apis.invalid' 
speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) - low_level = client.speech_api._gapic_api - self.assertIsInstance(low_level, _MockGAPICSpeechAPI) - self.assertIs(low_level._channel, channel_obj) - self.assertEqual( - channel_args, - [(credentials, _gax.DEFAULT_USER_AGENT, host)]) + low_level = client.speech_api._gapic_api + self.assertIsInstance(low_level, _MockGAPICSpeechAPI) + self.assertIs(low_level._channel, msc.return_value) + + assert msc.mock_calls[0] == mock.call( + credentials, + _gax.DEFAULT_USER_AGENT, + host, + ) sample = client.sample( source_uri=self.AUDIO_SOURCE_URI, encoding=speech.Encoding.FLAC, @@ -288,6 +285,7 @@ def speech_api(channel=None, **kwargs): next(sample.recognize(language_code='en-US')) def test_sync_recognize_with_gax(self): + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud import speech @@ -306,13 +304,6 @@ def test_sync_recognize_with_gax(self): }] result = _make_result(alternatives) - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( response=_make_sync_response(result), channel=channel, @@ -325,15 +316,19 @@ def speech_api(channel=None, **kwargs): source_uri=self.AUDIO_SOURCE_URI, encoding=speech.Encoding.FLAC, sample_rate_hertz=self.SAMPLE_RATE) - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) - low_level = client.speech_api._gapic_api - self.assertIsInstance(low_level, _MockGAPICSpeechAPI) - self.assertIs(low_level._channel, channel_obj) - self.assertEqual( - channel_args, [(creds, _gax.DEFAULT_USER_AGENT, host)]) + low_level = client.speech_api._gapic_api + self.assertIsInstance(low_level, _MockGAPICSpeechAPI) + self.assertIs(low_level._channel, msc.return_value) + + assert msc.mock_calls[0] == mock.call( + creds, + _gax.DEFAULT_USER_AGENT, + host, + ) results = [i for i in sample.recognize(language_code='en-US')] @@ -351,18 +346,6 @@ def speech_api(channel=None, **kwargs): self.assertEqual( result.alternatives[1].confidence, alternatives[1]['confidence']) - def test_async_supported_encodings(self): - from google.cloud import speech - - credentials = _make_credentials() - client = self._make_one(credentials=credentials, _use_grpc=True) - - sample = client.sample( - source_uri=self.AUDIO_SOURCE_URI, encoding=speech.Encoding.FLAC, - sample_rate_hertz=self.SAMPLE_RATE) - with self.assertRaises(ValueError): - sample.recognize(language_code='en-US') - def test_async_recognize_no_gax(self): from google.cloud import speech from google.cloud.speech.operation import Operation @@ -392,6 +375,7 @@ def test_async_recognize_no_gax(self): def test_async_recognize_with_gax(self): from google.cloud._testing import _Monkey + from google.cloud import _helpers from google.cloud import speech from google.cloud.speech import _gax from google.cloud.speech.operation import Operation @@ -400,13 +384,6 @@ def test_async_recognize_with_gax(self): client = 
self._make_one(credentials=credentials, _use_grpc=True) client._credentials = credentials - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - sample = client.sample( encoding=speech.Encoding.LINEAR16, sample_rate_hertz=self.SAMPLE_RATE, @@ -415,20 +392,21 @@ def make_channel(*args): def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI(channel=channel, **kwargs) + speech_api.SERVICE_ADDRESS = 'foo.api.invalid' - host = 'foo.apis.invalid' - speech_api.SERVICE_ADDRESS = host + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + api = client.speech_api - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - api = client.speech_api + low_level = api._gapic_api + self.assertIsInstance(low_level, _MockGAPICSpeechAPI) + self.assertIs(low_level._channel, msc.return_value) - low_level = api._gapic_api - self.assertIsInstance(low_level, _MockGAPICSpeechAPI) - self.assertIs(low_level._channel, channel_obj) - expected = (credentials, _gax.DEFAULT_USER_AGENT, - low_level.SERVICE_ADDRESS) - self.assertEqual(channel_args, [expected]) + assert msc.mock_calls[0] == mock.call( + credentials, + _gax.DEFAULT_USER_AGENT, + 'foo.api.invalid', + ) operation = sample.long_running_recognize(language_code='en-US') self.assertIsInstance(operation, Operation) @@ -450,6 +428,7 @@ def test_streaming_depends_on_gax(self): def test_streaming_closed_stream(self): from io import BytesIO + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -460,13 +439,6 @@ def test_streaming_closed_stream(self): client = self._make_one(credentials=credentials) client._credentials = credentials - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI(channel=channel, **kwargs) @@ -480,9 +452,9 @@ def speech_api(channel=None, **kwargs): stream=stream, encoding=Encoding.LINEAR16, sample_rate_hertz=self.SAMPLE_RATE) - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) with self.assertRaises(ValueError): list(sample.streaming_recognize(language_code='en-US')) @@ -490,6 +462,7 @@ def speech_api(channel=None, **kwargs): def test_stream_recognize_interim_results(self): from io import BytesIO + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -518,13 +491,6 @@ def test_stream_recognize_interim_results(self): alternatives, is_final=True, stability=0.4375)) responses = [first_response, second_response, last_response] - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( channel=channel, response=responses, **kwargs) @@ -532,9 +498,9 @@ def speech_api(channel=None, **kwargs): host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with 
mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) sample = client.sample( stream=stream, encoding=Encoding.LINEAR16, @@ -582,6 +548,7 @@ def speech_api(channel=None, **kwargs): def test_stream_recognize(self): from io import BytesIO + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -609,10 +576,6 @@ def test_stream_recognize(self): channel_args = [] channel_obj = object() - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( channel=channel, response=responses, **kwargs) @@ -620,9 +583,9 @@ def speech_api(channel=None, **kwargs): host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) sample = client.sample( stream=stream, encoding=Encoding.LINEAR16, @@ -639,6 +602,7 @@ def speech_api(channel=None, **kwargs): def test_stream_recognize_no_results(self): from io import BytesIO + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -651,13 +615,6 @@ def test_stream_recognize_no_results(self): responses = [_make_streaming_response()] - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI( channel=channel, response=responses, **kwargs) @@ -665,9 +622,9 @@ def speech_api(channel=None, **kwargs): host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) sample = client.sample( stream=stream, encoding=Encoding.LINEAR16, @@ -677,6 +634,7 @@ def speech_api(channel=None, **kwargs): self.assertEqual(results, []) def test_speech_api_with_gax(self): + from google.cloud import _helpers from google.cloud._testing import _Monkey from google.cloud.speech import _gax @@ -685,29 +643,25 @@ def test_speech_api_with_gax(self): client = self._make_one(credentials=creds, _use_grpc=True) client._credentials = creds - channel_args = [] - channel_obj = object() - - def make_channel(*args): - channel_args.append(args) - return channel_obj - def speech_api(channel=None, **kwargs): return _MockGAPICSpeechAPI(channel=channel, **kwargs) host = 'foo.apis.invalid' speech_api.SERVICE_ADDRESS = host - with _Monkey(_gax, SpeechClient=speech_api, - make_secure_channel=make_channel): - client._speech_api = _gax.GAPICSpeechAPI(client) + with _Monkey(_gax, SpeechClient=speech_api): + with mock.patch.object(_helpers, 'make_secure_channel') as msc: + client._speech_api = _gax.GAPICSpeechAPI(client) + + low_level = client.speech_api._gapic_api + self.assertIsInstance(low_level, _MockGAPICSpeechAPI) + self.assertIs(low_level._channel, msc.return_value) - low_level = client.speech_api._gapic_api - self.assertIsInstance(low_level, _MockGAPICSpeechAPI) - self.assertIs(low_level._channel, channel_obj) - expected = ( - creds, 
_gax.DEFAULT_USER_AGENT, low_level.SERVICE_ADDRESS) - self.assertEqual(channel_args, [expected]) + assert msc.mock_calls[0] == mock.call( + creds, + _gax.DEFAULT_USER_AGENT, + low_level.SERVICE_ADDRESS, + ) def test_speech_api_without_gax(self): from google.cloud._http import Connection diff --git a/speech/tests/unit/test_helpers.py b/speech/tests/unit/test_helpers.py new file mode 100644 index 000000000000..e12507d6565a --- /dev/null +++ b/speech/tests/unit/test_helpers.py @@ -0,0 +1,66 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +from types import GeneratorType +import unittest + +import mock + + +class TestSpeechClient(unittest.TestCase): + + @staticmethod + def _make_one(): + import google.auth.credentials + from google.cloud.speech_v1 import SpeechClient + + credentials = mock.Mock(spec=google.auth.credentials.Credentials) + return SpeechClient(credentials=credentials) + + def test_inherited_method(self): + from google.cloud.speech_v1 import types + + client = self._make_one() + + config = types.RecognitionConfig(encoding='FLAC') + audio = types.RecognitionAudio(uri='http://foo.com/bar.wav') + with mock.patch.object(client, '_recognize') as recognize: + client.recognize(config, audio) + + # Assert that the underlying GAPIC method was called as expected. + recognize.assert_called_once_with(types.RecognizeRequest( + config=config, + audio=audio, + ), None) + + def test_streaming_recognize(self): + from google.cloud.speech_v1 import types + + client = self._make_one() + + config = types.StreamingRecognitionConfig() + requests = [types.StreamingRecognizeRequest(audio_content=b'...')] + with mock.patch.object(client, '_streaming_recognize') as sr: + client.streaming_recognize(config, requests) + + # Assert that we called streaming recognize with an iterable + # that evaluates to the correct format. + _, args, _ = sr.mock_calls[0] + api_requests = args[0] + assert isinstance(api_requests, GeneratorType) + assert list(api_requests) == [ + types.StreamingRecognizeRequest(streaming_config=config), + requests[0], + ] From d946feb3bc371930aa77285d2b2b437b60f9c69f Mon Sep 17 00:00:00 2001 From: David Raleigh Date: Sun, 16 Jul 2017 09:09:26 -0700 Subject: [PATCH 14/62] fix BigQuery documentation broken link (#3611) closes issue https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3610 --- bigquery/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery/README.rst b/bigquery/README.rst index 1dcea16e0cc5..97a94366a49a 100644 --- a/bigquery/README.rst +++ b/bigquery/README.rst @@ -9,7 +9,7 @@ Python Client for Google BigQuery - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery-usage.html +..
_Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery/usage.html Quick Start ----------- From 07ff6096f27707455c2bb16179c83d1414329ee4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Sun, 16 Jul 2017 18:57:38 -0700 Subject: [PATCH 15/62] Fixing merge conflict in `setup.py` for Speech. (#3609) --- speech/setup.py | 24 ++---------------------- 1 file changed, 2 insertions(+), 22 deletions(-) diff --git a/speech/setup.py b/speech/setup.py index 7bd990e2be3d..1075df837141 100644 --- a/speech/setup.py +++ b/speech/setup.py @@ -37,7 +37,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -58,8 +58,6 @@ ] setup( - author='Google Cloud Platform', - author_email='googleapis-packages@google.com', name='google-cloud-speech', version='0.27.0', description='Python Client for Google Cloud Speech', @@ -74,23 +72,5 @@ ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, - url='https://github.com/GoogleCloudPlatform/google-cloud-python', - license='Apache 2.0', - platforms='Posix; MacOS X; Windows', - include_package_data=True, - zip_safe=False, - scripts=[], - classifiers=[ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Topic :: Internet', - ], + **SETUP_BASE ) From b3146807eed693cc0e255ee43cade546cc73ceb8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 17 Jul 2017 10:03:33 -0700 Subject: [PATCH 16/62] Using `CopyFrom` to set protobuf message fields (instead of `MergeFrom`). (#3612) Fixes #3571. 
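For reviewers unfamiliar with the protobuf semantics: `MergeFrom` overwrites singular fields but *concatenates* repeated fields, while `CopyFrom` clears the target message first and then copies. A minimal sketch of the difference (`FieldMask` is used here only because it is a small well-known message with a repeated field; the patch itself applies this to Bigtable's column-family messages):

    from google.protobuf import field_mask_pb2

    src = field_mask_pb2.FieldMask(paths=['a', 'b'])
    dst = field_mask_pb2.FieldMask(paths=['c'])

    dst.MergeFrom(src)
    print(list(dst.paths))  # ['c', 'a', 'b'] -- repeated fields accumulate

    dst = field_mask_pb2.FieldMask(paths=['c'])
    dst.CopyFrom(src)
    print(list(dst.paths))  # ['a', 'b'] -- target is cleared first, then copied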
--- bigtable/google/cloud/bigtable/table.py | 2 +- bigtable/tests/unit/test_table.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 3ed2d20ea975..ad6fab88dcf9 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -200,7 +200,7 @@ def create(self, initial_split_keys=None, column_families=()): table_pb = table_v2_pb2.Table() for col_fam in column_families: curr_id = col_fam.column_family_id - table_pb.column_families[curr_id].MergeFrom(col_fam.to_pb()) + table_pb.column_families[curr_id].CopyFrom(col_fam.to_pb()) request_pb = table_admin_messages_v2_pb2.CreateTableRequest( initial_splits=initial_split_keys or [], diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index d985f7eb2f0f..c59667d6a821 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -255,7 +255,7 @@ def _create_test_helper(self, initial_split_keys, column_families=()): for cf in column_families: cf_pb = table_pb.column_families[cf.column_family_id] if cf.gc_rule is not None: - cf_pb.gc_rule.MergeFrom(cf.gc_rule.to_pb()) + cf_pb.gc_rule.CopyFrom(cf.gc_rule.to_pb()) request_pb = _CreateTableRequestPB( initial_splits=splits_pb, parent=self.INSTANCE_NAME, From f755633c1ac9883d155baa9b68e36522c9774df6 Mon Sep 17 00:00:00 2001 From: Argyris Zymnis Date: Mon, 17 Jul 2017 10:54:45 -0700 Subject: [PATCH 17/62] Add a __hash__ implementation to SchemaField (#3601) * Add a __hash__ implementation to SchemaField * Modify default list of subfields to be the empty tuple * Making SchemaField immutable. * Adding SchemaField.__ne__. --- bigquery/google/cloud/bigquery/schema.py | 92 ++++++++++++--- bigquery/google/cloud/bigquery/table.py | 4 +- bigquery/tests/unit/test_query.py | 6 +- bigquery/tests/unit/test_schema.py | 136 +++++++++++++++++------ 4 files changed, 186 insertions(+), 52 deletions(-) diff --git a/bigquery/google/cloud/bigquery/schema.py b/bigquery/google/cloud/bigquery/schema.py index 6d4a437a809f..faec69f616da 100644 --- a/bigquery/google/cloud/bigquery/schema.py +++ b/bigquery/google/cloud/bigquery/schema.py @@ -26,27 +26,89 @@ class SchemaField(object): 'FLOAT', 'BOOLEAN', 'TIMESTAMP' or 'RECORD'). :type mode: str - :param mode: the type of the field (one of 'NULLABLE', 'REQUIRED', + :param mode: the mode of the field (one of 'NULLABLE', 'REQUIRED', or 'REPEATED'). :type description: str :param description: optional description for the field. - :type fields: list of :class:`SchemaField`, or None + :type fields: tuple of :class:`SchemaField` :param fields: subfields (requires ``field_type`` of 'RECORD'). """ - def __init__(self, name, field_type, mode='NULLABLE', description=None, - fields=None): - self.name = name - self.field_type = field_type - self.mode = mode - self.description = description - self.fields = fields + def __init__(self, name, field_type, mode='NULLABLE', + description=None, fields=()): + self._name = name + self._field_type = field_type + self._mode = mode + self._description = description + self._fields = tuple(fields) - def __eq__(self, other): + @property + def name(self): + """str: The name of the field.""" + return self._name + + @property + def field_type(self): + """str: The type of the field. + + Will be one of 'STRING', 'INTEGER', 'FLOAT', 'BOOLEAN', + 'TIMESTAMP' or 'RECORD'. + """ + return self._field_type + + @property + def mode(self): + """str: The mode of the field. 
+ + Will be one of 'NULLABLE', 'REQUIRED', or 'REPEATED'. + """ + return self._mode + + @property + def description(self): + """Optional[str]: Description for the field.""" + return self._description + + @property + def fields(self): + """tuple: Subfields contained in this field. + + If ``field_type`` is not 'RECORD', this property must be + empty / unset. + """ + return self._fields + + def _key(self): + """A tuple key that uniquely describes this field. + + Used to compute this instance's hashcode and evaluate equality. + + Returns: + tuple: The contents of this :class:`SchemaField`. + """ return ( - self.name == other.name and - self.field_type.lower() == other.field_type.lower() and - self.mode == other.mode and - self.description == other.description and - self.fields == other.fields) + return ( + self._name, + self._field_type.lower(), + self._mode, + self._description, + self._fields, + ) + + def __eq__(self, other): + if isinstance(other, SchemaField): + return self._key() == other._key() + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, SchemaField): + return self._key() != other._key() + else: + return NotImplemented + + def __hash__(self): + return hash(self._key()) + + def __repr__(self): + return 'SchemaField{}'.format(self._key()) diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index 37dc1159cc8e..2c4064e83e8f 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -1079,7 +1079,7 @@ def _parse_schema_resource(info): present in ``info``. """ if 'fields' not in info: - return None + return () schema = [] for r_field in info['fields']: @@ -1109,7 +1109,7 @@ def _build_schema_resource(fields): 'mode': field.mode} if field.description is not None: info['description'] = field.description - if field.fields is not None: + if field.fields: info['fields'] = _build_schema_resource(field.fields) infos.append(info) return infos diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py index d7977a4e7d0c..76d5057f6450 100644 --- a/bigquery/tests/unit/test_query.py +++ b/bigquery/tests/unit/test_query.py @@ -88,9 +88,9 @@ def _verifySchema(self, query, resource): self.assertEqual(found.mode, expected['mode']) self.assertEqual(found.description, expected.get('description')) - self.assertEqual(found.fields, expected.get('fields')) + self.assertEqual(found.fields, expected.get('fields', ())) else: - self.assertIsNone(query.schema) + self.assertEqual(query.schema, ()) def _verifyRows(self, query, resource): expected = resource.get('rows') @@ -166,7 +166,7 @@ def test_ctor_defaults(self): self.assertIsNone(query.page_token) self.assertEqual(query.query_parameters, []) self.assertEqual(query.rows, []) - self.assertIsNone(query.schema) + self.assertEqual(query.schema, ()) self.assertIsNone(query.total_rows) self.assertIsNone(query.total_bytes_processed) self.assertEqual(query.udf_resources, []) diff --git a/bigquery/tests/unit/test_schema.py b/bigquery/tests/unit/test_schema.py index 8081fcd6f4e0..018736d31bc1 100644 --- a/bigquery/tests/unit/test_schema.py +++ b/bigquery/tests/unit/test_schema.py @@ -26,43 +26,72 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_ctor_defaults(self): + def test_constructor_defaults(self): field = self._make_one('test', 'STRING') - self.assertEqual(field.name, 'test') - self.assertEqual(field.field_type, 'STRING') - self.assertEqual(field.mode, 'NULLABLE') -
self.assertIsNone(field.description) - self.assertIsNone(field.fields) + self.assertEqual(field._name, 'test') + self.assertEqual(field._field_type, 'STRING') + self.assertEqual(field._mode, 'NULLABLE') + self.assertIsNone(field._description) + self.assertEqual(field._fields, ()) - def test_ctor_explicit(self): + def test_constructor_explicit(self): field = self._make_one('test', 'STRING', mode='REQUIRED', description='Testing') - self.assertEqual(field.name, 'test') - self.assertEqual(field.field_type, 'STRING') - self.assertEqual(field.mode, 'REQUIRED') - self.assertEqual(field.description, 'Testing') - self.assertIsNone(field.fields) - - def test_ctor_subfields(self): + self.assertEqual(field._name, 'test') + self.assertEqual(field._field_type, 'STRING') + self.assertEqual(field._mode, 'REQUIRED') + self.assertEqual(field._description, 'Testing') + self.assertEqual(field._fields, ()) + + def test_constructor_subfields(self): + sub_field1 = self._make_one('area_code', 'STRING') + sub_field2 = self._make_one('local_number', 'STRING') field = self._make_one( - 'phone_number', 'RECORD', - fields=[self._make_one('area_code', 'STRING'), - self._make_one('local_number', 'STRING')]) - self.assertEqual(field.name, 'phone_number') - self.assertEqual(field.field_type, 'RECORD') - self.assertEqual(field.mode, 'NULLABLE') - self.assertIsNone(field.description) - self.assertEqual(len(field.fields), 2) - self.assertEqual(field.fields[0].name, 'area_code') - self.assertEqual(field.fields[0].field_type, 'STRING') - self.assertEqual(field.fields[0].mode, 'NULLABLE') - self.assertIsNone(field.fields[0].description) - self.assertIsNone(field.fields[0].fields) - self.assertEqual(field.fields[1].name, 'local_number') - self.assertEqual(field.fields[1].field_type, 'STRING') - self.assertEqual(field.fields[1].mode, 'NULLABLE') - self.assertIsNone(field.fields[1].description) - self.assertIsNone(field.fields[1].fields) + 'phone_number', + 'RECORD', + fields=[sub_field1, sub_field2], + ) + self.assertEqual(field._name, 'phone_number') + self.assertEqual(field._field_type, 'RECORD') + self.assertEqual(field._mode, 'NULLABLE') + self.assertIsNone(field._description) + self.assertEqual(len(field._fields), 2) + self.assertIs(field._fields[0], sub_field1) + self.assertIs(field._fields[1], sub_field2) + + def test_name_property(self): + name = 'lemon-ness' + schema_field = self._make_one(name, 'INTEGER') + self.assertIs(schema_field.name, name) + + def test_field_type_property(self): + field_type = 'BOOLEAN' + schema_field = self._make_one('whether', field_type) + self.assertIs(schema_field.field_type, field_type) + + def test_mode_property(self): + mode = 'REPEATED' + schema_field = self._make_one('again', 'FLOAT', mode=mode) + self.assertIs(schema_field.mode, mode) + + def test_description_property(self): + description = 'It holds some data.' 
+ schema_field = self._make_one( + 'do', 'TIMESTAMP', description=description) + self.assertIs(schema_field.description, description) + + def test_fields_property(self): + sub_field1 = self._make_one('one', 'STRING') + sub_field2 = self._make_one('fish', 'INTEGER') + fields = (sub_field1, sub_field2) + schema_field = self._make_one('boat', 'RECORD', fields=fields) + self.assertIs(schema_field.fields, fields) + + def test___eq___wrong_type(self): + field = self._make_one('test', 'STRING') + other = object() + self.assertNotEqual(field, other) + self.assertIs(field.__eq__(other), NotImplemented) def test___eq___name_mismatch(self): field = self._make_one('test', 'STRING') @@ -111,3 +140,46 @@ def test___eq___hit_w_fields(self): field = self._make_one('test', 'RECORD', fields=[sub1, sub2]) other = self._make_one('test', 'RECORD', fields=[sub1, sub2]) self.assertEqual(field, other) + + def test___ne___wrong_type(self): + field = self._make_one('toast', 'INTEGER') + other = object() + self.assertNotEqual(field, other) + self.assertIs(field.__ne__(other), NotImplemented) + + def test___ne___same_value(self): + field1 = self._make_one('test', 'TIMESTAMP', mode='REPEATED') + field2 = self._make_one('test', 'TIMESTAMP', mode='REPEATED') + # unittest ``assertEqual`` uses ``==`` not ``!=``. + comparison_val = (field1 != field2) + self.assertFalse(comparison_val) + + def test___ne___different_values(self): + field1 = self._make_one( + 'test1', 'FLOAT', mode='REPEATED', description='Not same') + field2 = self._make_one( + 'test2', 'FLOAT', mode='NULLABLE', description='Knot saym') + self.assertNotEqual(field1, field2) + + def test___hash__set_equality(self): + sub1 = self._make_one('sub1', 'STRING') + sub2 = self._make_one('sub2', 'STRING') + field1 = self._make_one('test', 'RECORD', fields=[sub1]) + field2 = self._make_one('test', 'RECORD', fields=[sub2]) + set_one = {field1, field2} + set_two = {field1, field2} + self.assertEqual(set_one, set_two) + + def test___hash__not_equals(self): + sub1 = self._make_one('sub1', 'STRING') + sub2 = self._make_one('sub2', 'STRING') + field1 = self._make_one('test', 'RECORD', fields=[sub1]) + field2 = self._make_one('test', 'RECORD', fields=[sub2]) + set_one = {field1} + set_two = {field2} + self.assertNotEqual(set_one, set_two) + + def test___repr__(self): + field1 = self._make_one('field1', 'STRING') + expected = "SchemaField('field1', 'string', 'NULLABLE', None, ())" + self.assertEqual(repr(field1), expected) From c6b5727c096347b77f416a67afd84e023c232310 Mon Sep 17 00:00:00 2001 From: Evawere Ogbe Date: Mon, 17 Jul 2017 12:29:46 -0700 Subject: [PATCH 18/62] Add bigquery jobid to table (#3605) --- bigquery/google/cloud/bigquery/table.py | 15 ++++++++++++--- bigquery/tests/unit/test_table.py | 16 ++++++++++++++++ 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index 2c4064e83e8f..7e21e35d1fb0 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -842,7 +842,8 @@ def upload_from_file(self, quote_character=None, skip_leading_rows=None, write_disposition=None, - client=None): + client=None, + job_name=None): """Upload the contents of this table from a file-like object. The content type of the upload will either be @@ -915,6 +916,10 @@ def upload_from_file(self, :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current dataset. 
+ :type job_name: str + :param job_name: Optional. The id of the job. Generated if not + explicitly passed in. + :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob` :returns: the job instance used to load the data (e.g., for querying status). Note that the job is already started: @@ -977,7 +982,7 @@ def upload_from_file(self, encoding, field_delimiter, ignore_unknown_values, max_bad_records, quote_character, skip_leading_rows, - write_disposition) + write_disposition, job_name) upload = Upload(file_obj, content_type, total_bytes, auto_transfer=False) @@ -1033,7 +1038,8 @@ def _configure_job_metadata(metadata, # pylint: disable=too-many-arguments max_bad_records, quote_character, skip_leading_rows, - write_disposition): + write_disposition, + job_name): """Helper for :meth:`Table.upload_from_file`.""" load_config = metadata['configuration']['load'] @@ -1067,6 +1073,9 @@ def _configure_job_metadata(metadata, # pylint: disable=too-many-arguments if write_disposition is not None: load_config['writeDisposition'] = write_disposition + if job_name is not None: + load_config['jobReference'] = {'jobId': job_name} + def _parse_schema_resource(info): """Parse a resource fragment into a schema field. diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index b27736fb896e..f535e8799628 100644 --- a/bigquery/tests/unit/test_table.py +++ b/bigquery/tests/unit/test_table.py @@ -1844,6 +1844,22 @@ class _UploadConfig(object): self.assertEqual(req['body'], BODY) # pylint: enable=too-many-statements + def test_upload_from_file_w_jobid(self): + import json + from google.cloud._helpers import _to_bytes + + requested, PATH, BODY = self._upload_from_file_helper(job_name='foo') + parse_chunk = _email_chunk_parser() + req = requested[0] + ctype, boundary = [x.strip() + for x in req['headers']['content-type'].split(';')] + divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) + chunks = req['body'].split(divider)[1:-1] # discard prolog / epilog + text_msg = parse_chunk(chunks[0].strip()) + metadata = json.loads(text_msg._payload) + load_config = metadata['configuration']['load'] + self.assertEqual(load_config['jobReference'], {'jobId': 'foo'}) + class Test_parse_schema_resource(unittest.TestCase, _SchemaBase): From c685055db0cc4c392600f35d2c006fe6b3f3482f Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 17 Jul 2017 14:27:51 -0700 Subject: [PATCH 19/62] Add base future package to google.cloud (#3616) --- core/.coveragerc | 3 + core/google/cloud/_helpers.py | 23 ++++ core/google/cloud/future/__init__.py | 21 +++ core/google/cloud/future/_helpers.py | 39 ++++++ core/google/cloud/future/base.py | 175 ++++++++++++++++++++++++ core/tests/unit/future/__init__.py | 0 core/tests/unit/future/test__helpers.py | 37 +++++ core/tests/unit/future/test_base.py | 145 ++++++++++++++++++++ core/tests/unit/test__helpers.py | 29 ++++ 9 files changed, 472 insertions(+) create mode 100644 core/google/cloud/future/__init__.py create mode 100644 core/google/cloud/future/_helpers.py create mode 100644 core/google/cloud/future/base.py create mode 100644 core/tests/unit/future/__init__.py create mode 100644 core/tests/unit/future/test__helpers.py create mode 100644 core/tests/unit/future/test_base.py diff --git a/core/.coveragerc b/core/.coveragerc index 9d89b1db5666..ce75f605a508 100644 --- a/core/.coveragerc +++ b/core/.coveragerc @@ -13,3 +13,6 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError + 
raise NotImplementedError() diff --git a/core/google/cloud/_helpers.py b/core/google/cloud/_helpers.py index 2c2f08dcfb45..72918e064507 100644 --- a/core/google/cloud/_helpers.py +++ b/core/google/cloud/_helpers.py @@ -379,6 +379,29 @@ def _bytes_to_unicode(value): raise ValueError('%r could not be converted to unicode' % (value,)) +def _from_any_pb(pb_type, any_pb): + """Converts an Any protobuf to the specified message type + + Args: + pb_type (type): the type of the message that any_pb stores an instance + of. + any_pb (google.protobuf.any_pb2.Any): the object to be converted. + + Returns: + pb_type: An instance of the pb_type message. + + Raises: + TypeError: if the message could not be converted. + """ + msg = pb_type() + if not any_pb.Unpack(msg): + raise TypeError( + 'Could not convert {} to {}'.format( + any_pb.__class__.__name__, pb_type.__name__)) + + return msg + + def _pb_timestamp_to_datetime(timestamp_pb): """Convert a Timestamp protobuf to a datetime object. diff --git a/core/google/cloud/future/__init__.py b/core/google/cloud/future/__init__.py new file mode 100644 index 000000000000..e5cf2b20ce7e --- /dev/null +++ b/core/google/cloud/future/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Futures for dealing with asynchronous operations.""" + +from google.cloud.future.base import Future + +__all__ = [ + 'Future', +] diff --git a/core/google/cloud/future/_helpers.py b/core/google/cloud/future/_helpers.py new file mode 100644 index 000000000000..933d0b8b2d44 --- /dev/null +++ b/core/google/cloud/future/_helpers.py @@ -0,0 +1,39 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Private helpers for futures.""" + +import logging +import threading + + +_LOGGER = logging.getLogger(__name__) + + +def start_daemon_thread(*args, **kwargs): + """Starts a thread and marks it as a daemon thread.""" + thread = threading.Thread(*args, **kwargs) + thread.daemon = True + thread.start() + return thread + + +def safe_invoke_callback(callback, *args, **kwargs): + """Invoke a callback, swallowing and logging any exceptions.""" + # pylint: disable=bare-except + # We intentionally want to swallow all exceptions. 
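+ # A failing callback is logged via _LOGGER.exception and suppressed, + # so one misbehaving callback cannot prevent other callbacks from + # running or break resolution of the Future.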
+ try: + return callback(*args, **kwargs) + except: + _LOGGER.exception('Error while executing Future callback.') diff --git a/core/google/cloud/future/base.py b/core/google/cloud/future/base.py new file mode 100644 index 000000000000..928269506b65 --- /dev/null +++ b/core/google/cloud/future/base.py @@ -0,0 +1,175 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Abstract and helper bases for Future implementations.""" + +import abc + +import six + +from google.cloud.future import _helpers + + +@six.add_metaclass(abc.ABCMeta) +class Future(object): + # pylint: disable=missing-docstring + # We inherit the interfaces here from concurrent.futures. + + """Future interface. + + This interface is based on :class:`concurrent.futures.Future`. + """ + + @abc.abstractmethod + def cancel(self): + raise NotImplementedError() + + @abc.abstractmethod + def cancelled(self): + raise NotImplementedError() + + @abc.abstractmethod + def running(self): + raise NotImplementedError() + + @abc.abstractmethod + def done(self): + raise NotImplementedError() + + @abc.abstractmethod + def result(self, timeout=None): + raise NotImplementedError() + + @abc.abstractmethod + def exception(self, timeout=None): + raise NotImplementedError() + + @abc.abstractmethod + def add_done_callback(self, fn): + # pylint: disable=invalid-name + raise NotImplementedError() + + @abc.abstractmethod + def set_result(self, result): + raise NotImplementedError() + + @abc.abstractmethod + def set_exception(self, exception): + raise NotImplementedError() + + +class PollingFuture(Future): + """A Future that needs to poll some service to check its status. + + The private :meth:`_blocking_poll` method should be implemented by + subclasses. + + .. note: Privacy here is intended to prevent the final class from + overexposing, not to prevent subclasses from accessing methods. + """ + def __init__(self): + super(PollingFuture, self).__init__() + self._result = None + self._exception = None + self._result_set = False + """bool: Set to True when the result has been set via set_result or + set_exception.""" + self._polling_thread = None + self._done_callbacks = [] + + @abc.abstractmethod + def _blocking_poll(self, timeout=None): + """Poll and wait for the Future to be resolved. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + """ + # pylint: disable=missing-raises + raise NotImplementedError() + + def result(self, timeout=None): + """Get the result of the operation, blocking if necessary. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + + Returns: + google.protobuf.Message: The Operation's result. + + Raises: + google.gax.GaxError: If the operation errors or if the timeout is + reached before the operation completes. + """ + self._blocking_poll() + + if self._exception is not None: + # pylint: disable=raising-bad-type + # Pylint doesn't recognize that this is valid in this case. 
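+ # This mirrors concurrent.futures.Future.result(): a failed operation
+ # re-raises its stored exception instead of returning it, so a caller
+ # can write (a sketch, assuming ``op`` is a concrete PollingFuture):
+ #
+ #     try:
+ #         value = op.result()
+ #     except Exception as exc:  # whatever set_exception() stored
+ #         handle(exc)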
+ raise self._exception
+
+ return self._result
+
+ def exception(self, timeout=None):
+ """Get the exception from the operation, blocking if necessary.
+
+ Args:
+ timeout (int): How long to wait for the operation to complete.
+ If None, wait indefinitely.
+
+ Returns:
+ Optional[google.gax.GaxError]: The operation's error.
+ """
+ self._blocking_poll()
+ return self._exception
+
+ def add_done_callback(self, fn):
+ """Add a callback to be executed when the operation is complete.
+
+ If the operation is not already complete, this will start a helper
+ thread to poll for the status of the operation in the background.
+
+ Args:
+ fn (Callable[Future]): The callback to execute when the operation
+ is complete.
+ """
+ if self._result_set:
+ _helpers.safe_invoke_callback(fn, self)
+ return
+
+ self._done_callbacks.append(fn)
+
+ if self._polling_thread is None:
+ # The polling thread will exit on its own as soon as the operation
+ # is done.
+ self._polling_thread = _helpers.start_daemon_thread(
+ target=self._blocking_poll)
+
+ def _invoke_callbacks(self, *args, **kwargs):
+ """Invoke all done callbacks."""
+ for callback in self._done_callbacks:
+ _helpers.safe_invoke_callback(callback, *args, **kwargs)
+
+ def set_result(self, result):
+ """Set the Future's result."""
+ self._result = result
+ self._result_set = True
+ self._invoke_callbacks(self)
+
+ def set_exception(self, exception):
+ """Set the Future's exception."""
+ self._exception = exception
+ self._result_set = True
+ self._invoke_callbacks(self)
diff --git a/core/tests/unit/future/__init__.py b/core/tests/unit/future/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/core/tests/unit/future/test__helpers.py b/core/tests/unit/future/test__helpers.py
new file mode 100644
index 000000000000..cbca5ba4d4df
--- /dev/null
+++ b/core/tests/unit/future/test__helpers.py
@@ -0,0 +1,37 @@
+# Copyright 2017, Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+
+from google.cloud.future import _helpers
+
+
+@mock.patch('threading.Thread', autospec=True)
+def test_start_daemon_thread(unused_thread):
+ daemon_thread = _helpers.start_daemon_thread(target=mock.sentinel.target)
+ assert daemon_thread.daemon is True
+
+
+def test_safe_invoke_callback():
+ callback = mock.Mock(spec=['__call__'], return_value=42)
+ result = _helpers.safe_invoke_callback(callback, 'a', b='c')
+ assert result == 42
+ callback.assert_called_once_with('a', b='c')
+
+
+def test_safe_invoke_callback_exception():
+ callback = mock.Mock(spec=['__call__'], side_effect=ValueError())
+ result = _helpers.safe_invoke_callback(callback, 'a', b='c')
+ assert result is None
+ callback.assert_called_once_with('a', b='c')
diff --git a/core/tests/unit/future/test_base.py b/core/tests/unit/future/test_base.py
new file mode 100644
index 000000000000..f10c10b24fb4
--- /dev/null
+++ b/core/tests/unit/future/test_base.py
@@ -0,0 +1,145 @@
+# Copyright 2017, Google Inc.
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import threading + +import mock +import pytest + +from google.cloud.future import base + + +class PollingFutureImpl(base.PollingFuture): + def _blocking_poll(self, timeout=None): # pragma: NO COVER + pass + + def cancel(self): + return True + + def cancelled(self): + return False + + def done(self): + return False + + def running(self): + return True + + +def test_polling_future_constructor(): + future = PollingFutureImpl() + assert not future.done() + assert not future.cancelled() + assert future.running() + assert future.cancel() + + +def test_set_result(): + future = PollingFutureImpl() + callback = mock.Mock() + + future.set_result(1) + + assert future.result() == 1 + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +def test_set_exception(): + future = PollingFutureImpl() + exception = ValueError('meep') + + future.set_exception(exception) + + assert future.exception() == exception + with pytest.raises(ValueError): + future.result() + + callback = mock.Mock() + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +def test_invoke_callback_exception(): + future = PollingFutureImplWithPoll() + future.set_result(42) + + # This should not raise, despite the callback causing an exception. + callback = mock.Mock(side_effect=ValueError) + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +class PollingFutureImplWithPoll(PollingFutureImpl): + def __init__(self): + super(PollingFutureImplWithPoll, self).__init__() + self.poll_count = 0 + self.event = threading.Event() + + def _blocking_poll(self, timeout=None): + if self._result_set: + return + + self.poll_count += 1 + self.event.wait() + self.set_result(42) + + +def test_result_with_polling(): + future = PollingFutureImplWithPoll() + + future.event.set() + result = future.result() + + assert result == 42 + assert future.poll_count == 1 + # Repeated calls should not cause additional polling + assert future.result() == result + assert future.poll_count == 1 + + +def test_callback_background_thread(): + future = PollingFutureImplWithPoll() + callback = mock.Mock() + + future.add_done_callback(callback) + + assert future._polling_thread is not None + assert future.poll_count == 1 + + future.event.set() + future._polling_thread.join() + + callback.assert_called_once_with(future) + + +def test_double_callback_background_thread(): + future = PollingFutureImplWithPoll() + callback = mock.Mock() + callback2 = mock.Mock() + + future.add_done_callback(callback) + current_thread = future._polling_thread + assert current_thread is not None + + # only one polling thread should be created. 
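+ # (``add_done_callback`` spawns a poller only when ``_polling_thread``
+ # is None, so this second registration must reuse the same thread.)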
+ future.add_done_callback(callback2) + assert future._polling_thread is current_thread + + future.event.set() + future._polling_thread.join() + + assert future.poll_count == 1 + callback.assert_called_once_with(future) + callback2.assert_called_once_with(future) diff --git a/core/tests/unit/test__helpers.py b/core/tests/unit/test__helpers.py index fcd47f7535bc..f7ba1b2c109f 100644 --- a/core/tests/unit/test__helpers.py +++ b/core/tests/unit/test__helpers.py @@ -554,6 +554,35 @@ def test_it(self): self.assertEqual(self._call_fut(timestamp), dt_stamp) +class Test__from_any_pb(unittest.TestCase): + + def _call_fut(self, pb_type, any_pb): + from google.cloud._helpers import _from_any_pb + + return _from_any_pb(pb_type, any_pb) + + def test_success(self): + from google.protobuf import any_pb2 + from google.type import date_pb2 + + in_message = date_pb2.Date(year=1990) + in_message_any = any_pb2.Any() + in_message_any.Pack(in_message) + out_message = self._call_fut(date_pb2.Date, in_message_any) + self.assertEqual(in_message, out_message) + + def test_failure(self, ): + from google.protobuf import any_pb2 + from google.type import date_pb2 + from google.type import timeofday_pb2 + + in_message = any_pb2.Any() + in_message.Pack(date_pb2.Date(year=1990)) + + with self.assertRaises(TypeError): + self._call_fut(timeofday_pb2.TimeOfDay, in_message) + + class Test__pb_timestamp_to_rfc3339(unittest.TestCase): def _call_fut(self, timestamp): From f4a6bfdf498d788c2e88b7bbcd9e32aedc54024f Mon Sep 17 00:00:00 2001 From: Maerig Date: Tue, 18 Jul 2017 23:28:54 +0900 Subject: [PATCH 20/62] Fix a typo in BigQuery usage documentation (#3621) --- docs/bigquery/usage.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/bigquery/usage.rst b/docs/bigquery/usage.rst index aaa63e91b679..77252e210ccd 100644 --- a/docs/bigquery/usage.rst +++ b/docs/bigquery/usage.rst @@ -307,7 +307,7 @@ Retrieve the results: .. code-block:: python >>> results = job.results() - >>> rows, total_count, token = query.fetch_data() # API requet + >>> rows, total_count, token = query.fetch_data() # API request >>> while True: ... do_something_with(rows) ... if token is None: From 2c5c306ac0b5b7eed5608f115f735b0254cd75ce Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 18 Jul 2017 11:46:05 -0700 Subject: [PATCH 21/62] Using assertEqual instead of assertEquals. (#3619) `assertEquals` is deprecated (but still is a synonym). 
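A minimal, self-contained sketch (not from this patch) of the warning the
deprecated alias emits, a ``DeprecationWarning`` on Python 3 and a
``PendingDeprecationWarning`` on Python 2.7, while ``assertEqual`` stays
silent:

    import unittest
    import warnings

    class AliasDemo(unittest.TestCase):
        def test_alias_warns(self):
            with warnings.catch_warnings(record=True) as caught:
                warnings.simplefilter('always')
                self.assertEquals(1, 1)  # deprecated spelling
            self.assertTrue(any(
                issubclass(w.category, (DeprecationWarning,
                                        PendingDeprecationWarning))
                for w in caught))

    if __name__ == '__main__':
        unittest.main()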
--- bigquery/tests/unit/test_dbapi_cursor.py | 70 ++++++++++---------- logging/tests/unit/handlers/test__helpers.py | 2 +- 2 files changed, 36 insertions(+), 36 deletions(-) diff --git a/bigquery/tests/unit/test_dbapi_cursor.py b/bigquery/tests/unit/test_dbapi_cursor.py index 901d2f176785..9671a27b8f8f 100644 --- a/bigquery/tests/unit/test_dbapi_cursor.py +++ b/bigquery/tests/unit/test_dbapi_cursor.py @@ -90,7 +90,7 @@ def test_fetchone_w_row(self): cursor = connection.cursor() cursor.execute('SELECT 1;') row = cursor.fetchone() - self.assertEquals(row, (1,)) + self.assertEqual(row, (1,)) self.assertIsNone(cursor.fetchone()) def test_fetchmany_wo_execute_raises_error(self): @@ -106,8 +106,8 @@ def test_fetchmany_w_row(self): cursor = connection.cursor() cursor.execute('SELECT 1;') rows = cursor.fetchmany() - self.assertEquals(len(rows), 1) - self.assertEquals(rows[0], (1,)) + self.assertEqual(len(rows), 1) + self.assertEqual(rows[0], (1,)) def test_fetchmany_w_size(self): from google.cloud.bigquery import dbapi @@ -121,14 +121,14 @@ def test_fetchmany_w_size(self): cursor = connection.cursor() cursor.execute('SELECT a, b, c;') rows = cursor.fetchmany(size=2) - self.assertEquals(len(rows), 2) - self.assertEquals(rows[0], (1, 2, 3)) - self.assertEquals(rows[1], (4, 5, 6)) + self.assertEqual(len(rows), 2) + self.assertEqual(rows[0], (1, 2, 3)) + self.assertEqual(rows[1], (4, 5, 6)) second_page = cursor.fetchmany(size=2) - self.assertEquals(len(second_page), 1) - self.assertEquals(second_page[0], (7, 8, 9)) + self.assertEqual(len(second_page), 1) + self.assertEqual(second_page[0], (7, 8, 9)) third_page = cursor.fetchmany(size=2) - self.assertEquals(third_page, []) + self.assertEqual(third_page, []) def test_fetchmany_w_arraysize(self): from google.cloud.bigquery import dbapi @@ -143,14 +143,14 @@ def test_fetchmany_w_arraysize(self): cursor.arraysize = 2 cursor.execute('SELECT a, b, c;') rows = cursor.fetchmany() - self.assertEquals(len(rows), 2) - self.assertEquals(rows[0], (1, 2, 3)) - self.assertEquals(rows[1], (4, 5, 6)) + self.assertEqual(len(rows), 2) + self.assertEqual(rows[0], (1, 2, 3)) + self.assertEqual(rows[1], (4, 5, 6)) second_page = cursor.fetchmany() - self.assertEquals(len(second_page), 1) - self.assertEquals(second_page[0], (7, 8, 9)) + self.assertEqual(len(second_page), 1) + self.assertEqual(second_page[0], (7, 8, 9)) third_page = cursor.fetchmany() - self.assertEquals(third_page, []) + self.assertEqual(third_page, []) def test_fetchall_wo_execute_raises_error(self): from google.cloud.bigquery import dbapi @@ -165,10 +165,10 @@ def test_fetchall_w_row(self): cursor = connection.cursor() cursor.execute('SELECT 1;') self.assertIsNone(cursor.description) - self.assertEquals(cursor.rowcount, 1) + self.assertEqual(cursor.rowcount, 1) rows = cursor.fetchall() - self.assertEquals(len(rows), 1) - self.assertEquals(rows[0], (1,)) + self.assertEqual(len(rows), 1) + self.assertEqual(rows[0], (1,)) def test_execute_w_dml(self): from google.cloud.bigquery.dbapi import connect @@ -177,7 +177,7 @@ def test_execute_w_dml(self): cursor = connection.cursor() cursor.execute('DELETE FROM UserSessions WHERE user_id = \'test\';') self.assertIsNone(cursor.description) - self.assertEquals(cursor.rowcount, 12) + self.assertEqual(cursor.rowcount, 12) def test_execute_w_query(self): from google.cloud.bigquery.schema import SchemaField @@ -193,29 +193,29 @@ def test_execute_w_query(self): cursor.execute('SELECT a, b, c FROM hello_world WHERE d > 3;') # Verify the description. 
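+ # Per PEP 249, each ``cursor.description`` entry is a 7-item sequence:
+ # (name, type_code, display_size, internal_size, precision, scale,
+ # null_ok); hence the seven-way unpacking below, which only inspects
+ # the first, second, and last items.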
- self.assertEquals(len(cursor.description), 3) + self.assertEqual(len(cursor.description), 3) a_name, a_type, _, _, _, _, a_null_ok = cursor.description[0] - self.assertEquals(a_name, 'a') - self.assertEquals(a_type, 'STRING') - self.assertEquals(a_type, dbapi.STRING) + self.assertEqual(a_name, 'a') + self.assertEqual(a_type, 'STRING') + self.assertEqual(a_type, dbapi.STRING) self.assertTrue(a_null_ok) b_name, b_type, _, _, _, _, b_null_ok = cursor.description[1] - self.assertEquals(b_name, 'b') - self.assertEquals(b_type, 'STRING') - self.assertEquals(b_type, dbapi.STRING) + self.assertEqual(b_name, 'b') + self.assertEqual(b_type, 'STRING') + self.assertEqual(b_type, dbapi.STRING) self.assertFalse(b_null_ok) c_name, c_type, _, _, _, _, c_null_ok = cursor.description[2] - self.assertEquals(c_name, 'c') - self.assertEquals(c_type, 'INTEGER') - self.assertEquals(c_type, dbapi.NUMBER) + self.assertEqual(c_name, 'c') + self.assertEqual(c_type, 'INTEGER') + self.assertEqual(c_type, dbapi.NUMBER) self.assertTrue(c_null_ok) # Verify the results. - self.assertEquals(cursor.rowcount, 2) + self.assertEqual(cursor.rowcount, 2) row = cursor.fetchone() - self.assertEquals(row, ('hello', 'world', 1)) + self.assertEqual(row, ('hello', 'world', 1)) row = cursor.fetchone() - self.assertEquals(row, ('howdy', 'y\'all', 2)) + self.assertEqual(row, ('howdy', 'y\'all', 2)) row = cursor.fetchone() self.assertIsNone(row) @@ -228,7 +228,7 @@ def test_executemany_w_dml(self): 'DELETE FROM UserSessions WHERE user_id = %s;', (('test',), ('anothertest',))) self.assertIsNone(cursor.description) - self.assertEquals(cursor.rowcount, 12) + self.assertEqual(cursor.rowcount, 12) def test__format_operation_w_dict(self): from google.cloud.bigquery.dbapi import cursor @@ -238,7 +238,7 @@ def test__format_operation_w_dict(self): 'somevalue': 'hi', 'a `weird` one': 'world', }) - self.assertEquals( + self.assertEqual( formatted_operation, 'SELECT @`somevalue`, @`a \\`weird\\` one`;') def test__format_operation_w_wrong_dict(self): @@ -257,7 +257,7 @@ def test__format_operation_w_sequence(self): from google.cloud.bigquery.dbapi import cursor formatted_operation = cursor._format_operation( 'SELECT %s, %s;', ('hello', 'world')) - self.assertEquals(formatted_operation, 'SELECT ?, ?;') + self.assertEqual(formatted_operation, 'SELECT ?, ?;') def test__format_operation_w_too_short_sequence(self): from google.cloud.bigquery import dbapi diff --git a/logging/tests/unit/handlers/test__helpers.py b/logging/tests/unit/handlers/test__helpers.py index 516cd93fc2d5..f721881eea11 100644 --- a/logging/tests/unit/handlers/test__helpers.py +++ b/logging/tests/unit/handlers/test__helpers.py @@ -101,7 +101,7 @@ def test_no_context_header(self): response = req.get_response(self.create_app()) trace_id = json.loads(response.body) - self.assertEquals(None, trace_id) + self.assertEqual(None, trace_id) def test_valid_context_header(self): import webob From 8783b7fb546f3885865240016a44dc2369f7bcc7 Mon Sep 17 00:00:00 2001 From: Son CHU Date: Tue, 18 Jul 2017 23:16:30 +0200 Subject: [PATCH 22/62] Add `is_nullable` method to check for `NULLABLE` mode (#3620) Resolves: #3548 --- bigquery/google/cloud/bigquery/dbapi/cursor.py | 2 +- bigquery/google/cloud/bigquery/schema.py | 5 +++++ bigquery/tests/unit/test_schema.py | 10 ++++++++++ 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/bigquery/google/cloud/bigquery/dbapi/cursor.py b/bigquery/google/cloud/bigquery/dbapi/cursor.py index 4398eec20b88..bcbb19cfd066 100644 --- 
a/bigquery/google/cloud/bigquery/dbapi/cursor.py +++ b/bigquery/google/cloud/bigquery/dbapi/cursor.py @@ -76,7 +76,7 @@ def _set_description(self, schema): internal_size=None, precision=None, scale=None, - null_ok=field.mode == 'NULLABLE') + null_ok=field.is_nullable) for field in schema]) def _set_rowcount(self, query_results): diff --git a/bigquery/google/cloud/bigquery/schema.py b/bigquery/google/cloud/bigquery/schema.py index faec69f616da..edd8dd68f3bd 100644 --- a/bigquery/google/cloud/bigquery/schema.py +++ b/bigquery/google/cloud/bigquery/schema.py @@ -65,6 +65,11 @@ def mode(self): """ return self._mode + @property + def is_nullable(self): + """Check whether 'mode' is 'nullable'.""" + return self._mode == 'NULLABLE' + @property def description(self): """Optional[str]: Description for the field.""" diff --git a/bigquery/tests/unit/test_schema.py b/bigquery/tests/unit/test_schema.py index 018736d31bc1..bf3cf2e025d1 100644 --- a/bigquery/tests/unit/test_schema.py +++ b/bigquery/tests/unit/test_schema.py @@ -74,6 +74,16 @@ def test_mode_property(self): schema_field = self._make_one('again', 'FLOAT', mode=mode) self.assertIs(schema_field.mode, mode) + def test_is_nullable(self): + mode = 'NULLABLE' + schema_field = self._make_one('test', 'FLOAT', mode=mode) + self.assertTrue(schema_field.is_nullable) + + def test_is_not_nullable(self): + mode = 'REPEATED' + schema_field = self._make_one('test', 'FLOAT', mode=mode) + self.assertFalse(schema_field.is_nullable) + def test_description_property(self): description = 'It holds some data.' schema_field = self._make_one( From b27a0bcf14d9259e932ebf8d351e7ab5d987823b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 18 Jul 2017 16:43:53 -0700 Subject: [PATCH 23/62] Fix "bad" storage unit tests. (#3627) These were "broken" by the release of google-resumable-media==0.2.0, but it just revealed that mocked response content was unicode when it should have been `bytes`. --- storage/tests/unit/test_blob.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/storage/tests/unit/test_blob.py b/storage/tests/unit/test_blob.py index a5d49bc4bacb..250a05bd28f4 100644 --- a/storage/tests/unit/test_blob.py +++ b/storage/tests/unit/test_blob.py @@ -954,7 +954,8 @@ def _make_resumable_transport(self, headers1, headers2, resumable_media.PERMANENT_REDIRECT, headers2) json_body = '{{"size": "{:d}"}}'.format(total_bytes) fake_response3 = self._mock_requests_response( - http_client.OK, headers3, content=json_body) + http_client.OK, headers3, + content=json_body.encode('utf-8')) responses = [fake_response1, fake_response2, fake_response3] fake_transport.request.side_effect = responses @@ -1161,7 +1162,7 @@ def test_upload_from_file_failure(self): from google.resumable_media import InvalidResponse from google.cloud import exceptions - message = u'Someone is already in this spot.' + message = b'Someone is already in this spot.' 
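+ # google-resumable-media 0.2.0 exposes ``response.content`` as raw
+ # bytes, so the mocked payload must be bytes; the assertions below
+ # compare against ``message.decode('utf-8')``.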
response = mock.Mock( content=message, status_code=http_client.CONFLICT, spec=[u'content', u'status_code']) @@ -1170,7 +1171,7 @@ def test_upload_from_file_failure(self): with self.assertRaises(exceptions.Conflict) as exc_info: self._upload_from_file_helper(side_effect=side_effect) - self.assertEqual(exc_info.exception.message, message) + self.assertEqual(exc_info.exception.message, message.decode('utf-8')) self.assertEqual(exc_info.exception.errors, []) def _do_upload_mock_call_helper(self, blob, client, content_type, size): @@ -1307,7 +1308,7 @@ def test_create_resumable_upload_session_with_failure(self): from google.resumable_media import InvalidResponse from google.cloud import exceptions - message = u'5-oh-3 woe is me.' + message = b'5-oh-3 woe is me.' response = mock.Mock( content=message, status_code=http_client.SERVICE_UNAVAILABLE, spec=[u'content', u'status_code']) @@ -1317,7 +1318,7 @@ def test_create_resumable_upload_session_with_failure(self): self._create_resumable_upload_session_helper( side_effect=side_effect) - self.assertEqual(exc_info.exception.message, message) + self.assertEqual(exc_info.exception.message, message.decode('utf-8')) self.assertEqual(exc_info.exception.errors, []) def test_get_iam_policy(self): @@ -2238,17 +2239,18 @@ def _helper(self, message, **kwargs): return exc_info def test_default(self): - message = u'Failure' + message = b'Failure' exc_info = self._helper(message) - self.assertEqual(exc_info.exception.message, message) + self.assertEqual(exc_info.exception.message, message.decode('utf-8')) self.assertEqual(exc_info.exception.errors, []) def test_with_error_info(self): - message = u'Eeek bad.' + message = b'Eeek bad.' error_info = 'http://test.invalid' exc_info = self._helper(message, error_info=error_info) - full_message = u'{} ({})'.format(message, error_info) + message_str = message.decode('utf-8') + full_message = u'{} ({})'.format(message_str, error_info) self.assertEqual(exc_info.exception.message, full_message) self.assertEqual(exc_info.exception.errors, []) From 7f56c0285089d5aa7b39f7c5e08da518db9015e8 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 19 Jul 2017 09:41:18 -0700 Subject: [PATCH 24/62] Add operation future (#3618) --- core/google/cloud/future/base.py | 51 ++++- core/google/cloud/future/operation.py | 247 +++++++++++++++++++++++ core/setup.py | 1 + core/tests/unit/future/test_base.py | 30 ++- core/tests/unit/future/test_operation.py | 207 +++++++++++++++++++ 5 files changed, 522 insertions(+), 14 deletions(-) create mode 100644 core/google/cloud/future/operation.py create mode 100644 core/tests/unit/future/test_operation.py diff --git a/core/google/cloud/future/base.py b/core/google/cloud/future/base.py index 928269506b65..aed1dfd80e5d 100644 --- a/core/google/cloud/future/base.py +++ b/core/google/cloud/future/base.py @@ -15,8 +15,12 @@ """Abstract and helper bases for Future implementations.""" import abc +import concurrent.futures +import functools +import operator import six +import tenacity from google.cloud.future import _helpers @@ -72,8 +76,8 @@ def set_exception(self, exception): class PollingFuture(Future): """A Future that needs to poll some service to check its status. - The private :meth:`_blocking_poll` method should be implemented by - subclasses. + The :meth:`done` method should be implemented by subclasses. The polling + behavior will repeatedly call ``done`` until it returns True. .. 
note: Privacy here is intended to prevent the final class from overexposing, not to prevent subclasses from accessing methods. @@ -89,6 +93,19 @@ def __init__(self): self._done_callbacks = [] @abc.abstractmethod + def done(self): + """Checks to see if the operation is complete. + + Returns: + bool: True if the operation is complete, False otherwise. + """ + # pylint: disable=redundant-returns-doc, missing-raises-doc + raise NotImplementedError() + + def running(self): + """True if the operation is currently running.""" + return not self.done() + def _blocking_poll(self, timeout=None): """Poll and wait for the Future to be resolved. @@ -96,8 +113,32 @@ def _blocking_poll(self, timeout=None): timeout (int): How long to wait for the operation to complete. If None, wait indefinitely. """ - # pylint: disable=missing-raises - raise NotImplementedError() + if self._result_set: + return + + retry_on = tenacity.retry_if_result( + functools.partial(operator.is_not, True)) + # Use exponential backoff with jitter. + wait_on = ( + tenacity.wait_exponential(multiplier=1, max=10) + + tenacity.wait_random(0, 1)) + + if timeout is None: + retry = tenacity.retry(retry=retry_on, wait=wait_on) + else: + retry = tenacity.retry( + retry=retry_on, + wait=wait_on, + stop=tenacity.stop_after_delay(timeout)) + + try: + retry(self.done)() + except tenacity.RetryError as exc: + six.raise_from( + concurrent.futures.TimeoutError( + 'Operation did not complete within the designated ' + 'timeout.'), + exc) def result(self, timeout=None): """Get the result of the operation, blocking if necessary. @@ -113,7 +154,7 @@ def result(self, timeout=None): google.gax.GaxError: If the operation errors or if the timeout is reached before the operation completes. """ - self._blocking_poll() + self._blocking_poll(timeout=timeout) if self._exception is not None: # pylint: disable=raising-bad-type diff --git a/core/google/cloud/future/operation.py b/core/google/cloud/future/operation.py new file mode 100644 index 000000000000..5bbfda1a8f0b --- /dev/null +++ b/core/google/cloud/future/operation.py @@ -0,0 +1,247 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Futures for long-running operations returned from Google Cloud APIs.""" + +import functools +import threading + +from google.longrunning import operations_pb2 +from google.protobuf import json_format +from google.rpc import code_pb2 + +from google.cloud import _helpers +from google.cloud import exceptions +from google.cloud.future import base + + +class Operation(base.PollingFuture): + """A Future for interacting with a Google API Long-Running Operation. + + Args: + operation (google.longrunning.operations_pb2.Operation): The + initial operation. + refresh (Callable[[], Operation]): A callable that returns the + latest state of the operation. + cancel (Callable[[], None]), A callable that tries to cancel + the operation. + result_type (type): The protobuf type for the operation's result. 
+ metadata_type (type): The protobuf type for the operation's + metadata. + """ + + def __init__( + self, operation, refresh, cancel, + result_type, metadata_type=None): + super(Operation, self).__init__() + self._operation = operation + self._refresh = refresh + self._cancel = cancel + self._result_type = result_type + self._metadata_type = metadata_type + self._completion_lock = threading.Lock() + # Invoke this in case the operation came back already complete. + self._set_result_from_operation() + + @property + def operation(self): + """google.longrunning.Operation: The current long-running operation.""" + return self._operation + + @property + def metadata(self): + """google.protobuf.Message: the current operation metadata.""" + if not self._operation.HasField('metadata'): + return None + + return _helpers._from_any_pb( + self._metadata_type, self._operation.metadata) + + def _set_result_from_operation(self): + """Set the result or exception from the operation if it is complete.""" + # This must be done in a lock to prevent the polling thread + # and main thread from both executing the completion logic + # at the same time. + with self._completion_lock: + # If the operation isn't complete or if the result has already been + # set, do not call set_result/set_exception again. + # Note: self._result_set is set to True in set_result and + # set_exception, in case those methods are invoked directly. + if not self._operation.done or self._result_set: + return + + if self._operation.HasField('response'): + response = _helpers._from_any_pb( + self._result_type, self._operation.response) + self.set_result(response) + elif self._operation.HasField('error'): + exception = exceptions.GoogleCloudError( + self._operation.error.message, + errors=(self._operation.error)) + self.set_exception(exception) + else: + exception = exceptions.GoogleCloudError( + 'Unexpected state: Long-running operation had neither ' + 'response nor error set.') + self.set_exception(exception) + + def _refresh_and_update(self): + """Refresh the operation and update the result if needed.""" + # If the currently cached operation is done, no need to make another + # RPC as it will not change once done. + if not self._operation.done: + self._operation = self._refresh() + self._set_result_from_operation() + + def done(self): + """Checks to see if the operation is complete. + + Returns: + bool: True if the operation is complete, False otherwise. + """ + self._refresh_and_update() + return self._operation.done + + def cancel(self): + """Attempt to cancel the operation. + + Returns: + bool: True if the cancel RPC was made, False if the operation is + already complete. + """ + if self.done(): + return False + + self._cancel() + return True + + def cancelled(self): + """True if the operation was cancelled.""" + self._refresh_and_update() + return (self._operation.HasField('error') and + self._operation.error.code == code_pb2.CANCELLED) + + +def _refresh_http(api_request, operation_name): + """Refresh an operation using a JSON/HTTP client. + + Args: + api_request (Callable): A callable used to make an API request. This + should generally be + :meth:`google.cloud._http.Connection.api_request`. + operation_name (str): The name of the operation. + + Returns: + google.longrunning.operations_pb2.Operation: The operation. 
+ """ + path = 'operations/{}'.format(operation_name) + api_response = api_request(method='GET', path=path) + return json_format.ParseDict( + api_response, operations_pb2.Operation()) + + +def _cancel_http(api_request, operation_name): + """Cancel an operation using a JSON/HTTP client. + + Args: + api_request (Callable): A callable used to make an API request. This + should generally be + :meth:`google.cloud._http.Connection.api_request`. + operation_name (str): The name of the operation. + """ + path = 'operations/{}:cancel'.format(operation_name) + api_request(method='POST', path=path) + + +def from_http_json(operation, api_request, result_type, **kwargs): + """Create an operation future from using a HTTP/JSON client. + + This interacts with the long-running operations `service`_ (specific + to a given API) vis `HTTP/JSON`_. + + .. _HTTP/JSON: https://cloud.google.com/speech/reference/rest/\ + v1beta1/operations#Operation + + Args: + operation (dict): Operation as a dictionary. + api_request (Callable): A callable used to make an API request. This + should generally be + :meth:`google.cloud._http.Connection.api_request`. + result_type (type): The protobuf result type. + kwargs: Keyword args passed into the :class:`Operation` constructor. + + Returns: + Operation: The operation future to track the given operation. + """ + operation_proto = json_format.ParseDict( + operation, operations_pb2.Operation()) + refresh = functools.partial( + _refresh_http, api_request, operation_proto.name) + cancel = functools.partial( + _cancel_http, api_request, operation_proto.name) + return Operation(operation_proto, refresh, cancel, result_type, **kwargs) + + +def _refresh_grpc(operations_stub, operation_name): + """Refresh an operation using a gRPC client. + + Args: + operations_stub (google.longrunning.operations_pb2.OperationsStub): + The gRPC operations stub. + operation_name (str): The name of the operation. + + Returns: + google.longrunning.operations_pb2.Operation: The operation. + """ + request_pb = operations_pb2.GetOperationRequest(name=operation_name) + return operations_stub.GetOperation(request_pb) + + +def _cancel_grpc(operations_stub, operation_name): + """Cancel an operation using a gRPC client. + + Args: + operations_stub (google.longrunning.operations_pb2.OperationsStub): + The gRPC operations stub. + operation_name (str): The name of the operation. + """ + request_pb = operations_pb2.CancelOperationRequest(name=operation_name) + operations_stub.CancelOperation(request_pb) + + +def from_grpc(operation, operations_stub, result_type, **kwargs): + """Create an operation future from using a gRPC client. + + This interacts with the long-running operations `service`_ (specific + to a given API) via gRPC. + + .. _service: https://github.com/googleapis/googleapis/blob/\ + 050400df0fdb16f63b63e9dee53819044bffc857/\ + google/longrunning/operations.proto#L38 + + Args: + operation (google.longrunning.operations_pb2.Operation): The operation. + operations_stub (google.longrunning.operations_pb2.OperationsStub): + The operations stub. + result_type (type): The protobuf result type. + kwargs: Keyword args passed into the :class:`Operation` constructor. + + Returns: + Operation: The operation future to track the given operation. 
+ """ + refresh = functools.partial( + _refresh_grpc, operations_stub, operation.name) + cancel = functools.partial( + _cancel_grpc, operations_stub, operation.name) + return Operation(operation, refresh, cancel, result_type, **kwargs) diff --git a/core/setup.py b/core/setup.py index cd461c5f2526..ba84f2347d18 100644 --- a/core/setup.py +++ b/core/setup.py @@ -57,6 +57,7 @@ 'google-auth >= 0.4.0, < 2.0.0dev', 'google-auth-httplib2', 'six', + 'tenacity >= 4.0.0, <5.0.0dev' ] setup( diff --git a/core/tests/unit/future/test_base.py b/core/tests/unit/future/test_base.py index f10c10b24fb4..69a0348e68d9 100644 --- a/core/tests/unit/future/test_base.py +++ b/core/tests/unit/future/test_base.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import concurrent.futures import threading +import time import mock import pytest @@ -21,8 +23,8 @@ class PollingFutureImpl(base.PollingFuture): - def _blocking_poll(self, timeout=None): # pragma: NO COVER - pass + def done(self): + return False def cancel(self): return True @@ -30,9 +32,6 @@ def cancel(self): def cancelled(self): return False - def done(self): - return False - def running(self): return True @@ -87,13 +86,11 @@ def __init__(self): self.poll_count = 0 self.event = threading.Event() - def _blocking_poll(self, timeout=None): - if self._result_set: - return - + def done(self): self.poll_count += 1 self.event.wait() self.set_result(42) + return True def test_result_with_polling(): @@ -109,6 +106,18 @@ def test_result_with_polling(): assert future.poll_count == 1 +class PollingFutureImplTimeout(PollingFutureImplWithPoll): + def done(self): + time.sleep(1) + return False + + +def test_result_timeout(): + future = PollingFutureImplTimeout() + with pytest.raises(concurrent.futures.TimeoutError): + future.result(timeout=1) + + def test_callback_background_thread(): future = PollingFutureImplWithPoll() callback = mock.Mock() @@ -116,6 +125,9 @@ def test_callback_background_thread(): future.add_done_callback(callback) assert future._polling_thread is not None + + # Give the thread a second to poll + time.sleep(1) assert future.poll_count == 1 future.event.set() diff --git a/core/tests/unit/future/test_operation.py b/core/tests/unit/future/test_operation.py new file mode 100644 index 000000000000..0e29aa687ee6 --- /dev/null +++ b/core/tests/unit/future/test_operation.py @@ -0,0 +1,207 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import mock + +from google.cloud.future import operation +from google.longrunning import operations_pb2 +from google.protobuf import struct_pb2 +from google.rpc import code_pb2 +from google.rpc import status_pb2 + +TEST_OPERATION_NAME = 'test/operation' + + +def make_operation_proto( + name=TEST_OPERATION_NAME, metadata=None, response=None, + error=None, **kwargs): + operation_proto = operations_pb2.Operation( + name=name, **kwargs) + + if metadata is not None: + operation_proto.metadata.Pack(metadata) + + if response is not None: + operation_proto.response.Pack(response) + + if error is not None: + operation_proto.error.CopyFrom(error) + + return operation_proto + + +def make_operation_future(client_operations_responses=None): + if client_operations_responses is None: + client_operations_responses = [make_operation_proto()] + + refresh = mock.Mock( + spec=['__call__'], side_effect=client_operations_responses) + refresh.responses = client_operations_responses + cancel = mock.Mock(spec=['__call__']) + operation_future = operation.Operation( + client_operations_responses[0], + refresh, + cancel, + result_type=struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + return operation_future, refresh, cancel + + +def test_constructor(): + future, refresh, cancel = make_operation_future() + + assert future.operation == refresh.responses[0] + assert future.operation.done is False + assert future.operation.name == TEST_OPERATION_NAME + assert future.metadata is None + assert future.running() + + +def test_metadata(): + expected_metadata = struct_pb2.Struct() + future, _, _ = make_operation_future( + [make_operation_proto(metadata=expected_metadata)]) + + assert future.metadata == expected_metadata + + +def test_cancellation(): + responses = [ + make_operation_proto(), + # Second response indicates that the operation was cancelled. + make_operation_proto( + done=True, + error=status_pb2.Status(code=code_pb2.CANCELLED))] + future, _, cancel = make_operation_future(responses) + + assert future.cancel() + assert future.cancelled() + cancel.assert_called_once_with() + + # Cancelling twice should have no effect. + assert not future.cancel() + cancel.assert_called_once_with() + + +def test_result(): + expected_result = struct_pb2.Struct() + responses = [ + make_operation_proto(), + # Second operation response includes the result. + make_operation_proto(done=True, response=expected_result)] + future, _, _ = make_operation_future(responses) + + result = future.result() + + assert result == expected_result + assert future.done() + + +def test_exception(): + expected_exception = status_pb2.Status(message='meep') + responses = [ + make_operation_proto(), + # Second operation response includes the error. + make_operation_proto(done=True, error=expected_exception)] + future, _, _ = make_operation_future(responses) + + exception = future.exception() + + assert expected_exception.message in '{!r}'.format(exception) + + +def test_unexpected_result(): + responses = [ + make_operation_proto(), + # Second operation response is done, but has not error or response. 
+ make_operation_proto(done=True)] + future, _, _ = make_operation_future(responses) + + exception = future.exception() + + assert 'Unexpected state' in '{!r}'.format(exception) + + +def test__refresh_http(): + api_request = mock.Mock( + return_value={'name': TEST_OPERATION_NAME, 'done': True}) + + result = operation._refresh_http(api_request, TEST_OPERATION_NAME) + + assert result.name == TEST_OPERATION_NAME + assert result.done is True + api_request.assert_called_once_with( + method='GET', path='operations/{}'.format(TEST_OPERATION_NAME)) + + +def test__cancel_http(): + api_request = mock.Mock() + + operation._cancel_http(api_request, TEST_OPERATION_NAME) + + api_request.assert_called_once_with( + method='POST', path='operations/{}:cancel'.format(TEST_OPERATION_NAME)) + + +def test_from_http_json(): + operation_json = {'name': TEST_OPERATION_NAME, 'done': True} + api_request = mock.sentinel.api_request + + future = operation.from_http_json( + operation_json, api_request, struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + assert future._result_type == struct_pb2.Struct + assert future._metadata_type == struct_pb2.Struct + assert future.operation.name == TEST_OPERATION_NAME + assert future.done + + +def test__refresh_grpc(): + operations_stub = mock.Mock(spec=['GetOperation']) + expected_result = make_operation_proto(done=True) + operations_stub.GetOperation.return_value = expected_result + + result = operation._refresh_grpc(operations_stub, TEST_OPERATION_NAME) + + assert result == expected_result + expected_request = operations_pb2.GetOperationRequest( + name=TEST_OPERATION_NAME) + operations_stub.GetOperation.assert_called_once_with(expected_request) + + +def test__cancel_grpc(): + operations_stub = mock.Mock(spec=['CancelOperation']) + + operation._cancel_grpc(operations_stub, TEST_OPERATION_NAME) + + expected_request = operations_pb2.CancelOperationRequest( + name=TEST_OPERATION_NAME) + operations_stub.CancelOperation.assert_called_once_with(expected_request) + + +def test_from_grpc(): + operation_proto = make_operation_proto(done=True) + operations_stub = mock.sentinel.operations_stub + + future = operation.from_grpc( + operation_proto, operations_stub, struct_pb2.Struct, + metadata_type=struct_pb2.Struct) + + assert future._result_type == struct_pb2.Struct + assert future._metadata_type == struct_pb2.Struct + assert future.operation.name == TEST_OPERATION_NAME + assert future.done From 9e23bf18bd28f3a952a1788ebe256383eef532b7 Mon Sep 17 00:00:00 2001 From: florencep Date: Wed, 19 Jul 2017 10:34:54 -0700 Subject: [PATCH 25/62] update the documentation link (#3630) due to the change of the Python Client library doc link --- translate/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/translate/README.rst b/translate/README.rst index a85374ff5298..47ecc3b553d2 100644 --- a/translate/README.rst +++ b/translate/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Translation - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/translate-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/translate/usage.html Quick Start ----------- From 67c0f626f1b244676e2c3dc93fb32a632037e3f9 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 19 Jul 2017 14:44:52 -0700 Subject: [PATCH 26/62] Using assertIs in unit tests where appropriate. (#3629) * Using assertIs in unit tests where appropriate. Any usage of `self.assertTrue(a is b)` has become `self.assertIs(a, b)`. 
* Converting some assertFalse(a is b) to assertIsNot(a, b). --- spanner/tests/unit/test__helpers.py | 2 +- spanner/tests/unit/test_batch.py | 4 +-- spanner/tests/unit/test_client.py | 16 ++++++------ spanner/tests/unit/test_database.py | 36 +++++++++++++------------- spanner/tests/unit/test_instance.py | 16 ++++++------ spanner/tests/unit/test_session.py | 14 +++++----- spanner/tests/unit/test_snapshot.py | 12 ++++----- spanner/tests/unit/test_streamed.py | 8 +++--- spanner/tests/unit/test_transaction.py | 2 +- speech/tests/unit/test_client.py | 4 +-- 10 files changed, 57 insertions(+), 57 deletions(-) diff --git a/spanner/tests/unit/test__helpers.py b/spanner/tests/unit/test__helpers.py index 172c3343cba0..beb5ed7b6bac 100644 --- a/spanner/tests/unit/test__helpers.py +++ b/spanner/tests/unit/test__helpers.py @@ -512,7 +512,7 @@ def _make_one(self, session): def test_ctor(self): session = object() base = self._make_one(session) - self.assertTrue(base._session is session) + self.assertIs(base._session, session) class Test_options_with_prefix(unittest.TestCase): diff --git a/spanner/tests/unit/test_batch.py b/spanner/tests/unit/test_batch.py index ad4cbc872a1e..cf65fdd7e4f5 100644 --- a/spanner/tests/unit/test_batch.py +++ b/spanner/tests/unit/test_batch.py @@ -65,7 +65,7 @@ def _compare_values(self, result, source): def test_ctor(self): session = _Session() base = self._make_one(session) - self.assertTrue(base._session is session) + self.assertIs(base._session, session) self.assertEqual(len(base._mutations), 0) def test__check_state_virtual(self): @@ -177,7 +177,7 @@ def _getTargetClass(self): def test_ctor(self): session = _Session() batch = self._make_one(session) - self.assertTrue(batch._session is session) + self.assertIs(batch._session, session) def test_commit_already_committed(self): from google.cloud.spanner.keyset import KeySet diff --git a/spanner/tests/unit/test_client.py b/spanner/tests/unit/test_client.py index 98e916d8927d..c71429c22535 100644 --- a/spanner/tests/unit/test_client.py +++ b/spanner/tests/unit/test_client.py @@ -60,7 +60,7 @@ def _constructor_test_helper(self, expected_scopes, creds, expected_creds = expected_creds or creds.with_scopes.return_value self.assertIs(client._credentials, expected_creds) - self.assertTrue(client._credentials is expected_creds) + self.assertIs(client._credentials, expected_creds) if expected_scopes is not None: creds.with_scopes.assert_called_once_with(expected_scopes) @@ -162,7 +162,7 @@ def __init__(self, *args, **kwargs): self.assertTrue(isinstance(api, _Client)) again = client.instance_admin_api - self.assertTrue(again is api) + self.assertIs(again, api) self.assertEqual(api.kwargs['lib_name'], 'gccl') self.assertIs(api.kwargs['credentials'], client.credentials) @@ -183,7 +183,7 @@ def __init__(self, *args, **kwargs): self.assertTrue(isinstance(api, _Client)) again = client.database_admin_api - self.assertTrue(again is api) + self.assertIs(again, api) self.assertEqual(api.kwargs['lib_name'], 'gccl') self.assertIs(api.kwargs['credentials'], client.credentials) @@ -202,7 +202,7 @@ def test_copy(self): def test_credentials_property(self): credentials = _Credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) - self.assertTrue(client.credentials is credentials) + self.assertIs(client.credentials, credentials) def test_project_name_property(self): credentials = _Credentials() @@ -236,7 +236,7 @@ def test_list_instance_configs_wo_paging(self): project, page_size, options = api._listed_instance_configs 
self.assertEqual(project, self.PATH) self.assertEqual(page_size, None) - self.assertTrue(options.page_token is INITIAL_PAGE) + self.assertIs(options.page_token, INITIAL_PAGE) self.assertEqual( options.kwargs['metadata'], [('google-cloud-resource-prefix', client.project_name)]) @@ -292,7 +292,7 @@ def test_instance_factory_defaults(self): self.assertIsNone(instance.configuration_name) self.assertEqual(instance.display_name, self.INSTANCE_ID) self.assertEqual(instance.node_count, DEFAULT_NODE_COUNT) - self.assertTrue(instance._client is client) + self.assertIs(instance._client, client) def test_instance_factory_explicit(self): from google.cloud.spanner.instance import Instance @@ -309,7 +309,7 @@ def test_instance_factory_explicit(self): self.assertEqual(instance.configuration_name, self.CONFIGURATION_NAME) self.assertEqual(instance.display_name, self.DISPLAY_NAME) self.assertEqual(instance.node_count, self.NODE_COUNT) - self.assertTrue(instance._client is client) + self.assertIs(instance._client, client) def test_list_instances_wo_paging(self): from google.cloud._testing import _GAXPageIterator @@ -342,7 +342,7 @@ def test_list_instances_wo_paging(self): self.assertEqual(project, self.PATH) self.assertEqual(filter_, 'name:TEST') self.assertEqual(page_size, None) - self.assertTrue(options.page_token is INITIAL_PAGE) + self.assertIs(options.page_token, INITIAL_PAGE) self.assertEqual( options.kwargs['metadata'], [('google-cloud-resource-prefix', client.project_name)]) diff --git a/spanner/tests/unit/test_database.py b/spanner/tests/unit/test_database.py index 5369a6f2c0d1..5200a0ab7d1b 100644 --- a/spanner/tests/unit/test_database.py +++ b/spanner/tests/unit/test_database.py @@ -50,7 +50,7 @@ def test_ctor_defaults(self): database = self._make_one(self.DATABASE_ID, instance) self.assertEqual(database.database_id, self.DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), []) self.assertIsInstance(database._pool, BurstyPool) # BurstyPool does not create sessions during 'bind()'. 
@@ -61,7 +61,7 @@ def test_ctor_w_explicit_pool(self): pool = _Pool() database = self._make_one(self.DATABASE_ID, instance, pool=pool) self.assertEqual(database.database_id, self.DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), []) self.assertIs(database._pool, pool) self.assertIs(pool._bound, database) @@ -89,7 +89,7 @@ def test_ctor_w_ddl_statements_ok(self): self.DATABASE_ID, instance, ddl_statements=DDL_STATEMENTS, pool=pool) self.assertEqual(database.database_id, self.DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), DDL_STATEMENTS) def test_from_pb_bad_database_name(self): @@ -196,10 +196,10 @@ def _mock_spanner_client(*args, **kwargs): with _Monkey(MUT, SpannerClient=_mock_spanner_client): api = database.spanner_api - self.assertTrue(api is _client) + self.assertIs(api, _client) # API instance is cached again = database.spanner_api - self.assertTrue(again is api) + self.assertIs(again, api) def test___eq__(self): instance = _Instance(self.INSTANCE_NAME) @@ -567,8 +567,8 @@ def test_session_factory(self): session = database.session() self.assertTrue(isinstance(session, Session)) - self.assertTrue(session.session_id is None) - self.assertTrue(session._database is database) + self.assertIs(session.session_id, None) + self.assertIs(session._database, database) def test_execute_sql_defaults(self): QUERY = 'SELECT * FROM employees' @@ -671,7 +671,7 @@ def test_batch(self): checkout = database.batch() self.assertIsInstance(checkout, BatchCheckout) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) def test_snapshot_defaults(self): from google.cloud.spanner.database import SnapshotCheckout @@ -685,7 +685,7 @@ def test_snapshot_defaults(self): checkout = database.snapshot() self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertIsNone(checkout._read_timestamp) self.assertIsNone(checkout._min_read_timestamp) self.assertIsNone(checkout._max_staleness) @@ -707,7 +707,7 @@ def test_snapshot_w_read_timestamp(self): checkout = database.snapshot(read_timestamp=now) self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertEqual(checkout._read_timestamp, now) self.assertIsNone(checkout._min_read_timestamp) self.assertIsNone(checkout._max_staleness) @@ -729,7 +729,7 @@ def test_snapshot_w_min_read_timestamp(self): checkout = database.snapshot(min_read_timestamp=now) self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertIsNone(checkout._read_timestamp) self.assertEqual(checkout._min_read_timestamp, now) self.assertIsNone(checkout._max_staleness) @@ -750,7 +750,7 @@ def test_snapshot_w_max_staleness(self): checkout = database.snapshot(max_staleness=staleness) self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertIsNone(checkout._read_timestamp) self.assertIsNone(checkout._min_read_timestamp) self.assertEqual(checkout._max_staleness, staleness) @@ -771,7 +771,7 @@ def test_snapshot_w_exact_staleness(self): checkout = 
database.snapshot(exact_staleness=staleness) self.assertIsInstance(checkout, SnapshotCheckout) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertIsNone(checkout._read_timestamp) self.assertIsNone(checkout._min_read_timestamp) self.assertIsNone(checkout._max_staleness) @@ -788,7 +788,7 @@ def _getTargetClass(self): def test_ctor(self): database = _Database(self.DATABASE_NAME) checkout = self._make_one(database) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) def test_context_mgr_success(self): import datetime @@ -865,7 +865,7 @@ def test_ctor_defaults(self): pool.put(session) checkout = self._make_one(database) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertIsNone(checkout._read_timestamp) self.assertIsNone(checkout._min_read_timestamp) self.assertIsNone(checkout._max_staleness) @@ -891,7 +891,7 @@ def test_ctor_w_read_timestamp(self): pool.put(session) checkout = self._make_one(database, read_timestamp=now) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertEqual(checkout._read_timestamp, now) self.assertIsNone(checkout._min_read_timestamp) self.assertIsNone(checkout._max_staleness) @@ -918,7 +918,7 @@ def test_ctor_w_min_read_timestamp(self): pool.put(session) checkout = self._make_one(database, min_read_timestamp=now) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertIsNone(checkout._read_timestamp) self.assertEqual(checkout._min_read_timestamp, now) self.assertIsNone(checkout._max_staleness) @@ -944,7 +944,7 @@ def test_ctor_w_max_staleness(self): pool.put(session) checkout = self._make_one(database, max_staleness=staleness) - self.assertTrue(checkout._database is database) + self.assertIs(checkout._database, database) self.assertIsNone(checkout._read_timestamp) self.assertIsNone(checkout._min_read_timestamp) self.assertEqual(checkout._max_staleness, staleness) diff --git a/spanner/tests/unit/test_instance.py b/spanner/tests/unit/test_instance.py index b556a0396f01..d86c611b3ccb 100644 --- a/spanner/tests/unit/test_instance.py +++ b/spanner/tests/unit/test_instance.py @@ -50,8 +50,8 @@ def test_constructor_defaults(self): client = object() instance = self._make_one(self.INSTANCE_ID, client) self.assertEqual(instance.instance_id, self.INSTANCE_ID) - self.assertTrue(instance._client is client) - self.assertTrue(instance.configuration_name is None) + self.assertIs(instance._client, client) + self.assertIs(instance.configuration_name, None) self.assertEqual(instance.node_count, DEFAULT_NODE_COUNT) self.assertEqual(instance.display_name, self.INSTANCE_ID) @@ -64,7 +64,7 @@ def test_constructor_non_default(self): node_count=self.NODE_COUNT, display_name=DISPLAY_NAME) self.assertEqual(instance.instance_id, self.INSTANCE_ID) - self.assertTrue(instance._client is client) + self.assertIs(instance._client, client) self.assertEqual(instance.configuration_name, self.CONFIG_NAME) self.assertEqual(instance.node_count, self.NODE_COUNT) self.assertEqual(instance.display_name, DISPLAY_NAME) @@ -78,10 +78,10 @@ def test_copy(self): new_instance = instance.copy() # Make sure the client copy succeeded. - self.assertFalse(new_instance._client is client) + self.assertIsNot(new_instance._client, client) self.assertEqual(new_instance._client, client) # Make sure the client got copied to a new instance. 
- self.assertFalse(instance is new_instance) + self.assertIsNot(instance, new_instance) self.assertEqual(instance, new_instance) def test__update_from_pb_success(self): @@ -496,7 +496,7 @@ def test_database_factory_defaults(self): self.assertTrue(isinstance(database, Database)) self.assertEqual(database.database_id, DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), []) self.assertIsInstance(database._pool, BurstyPool) pool = database._pool @@ -516,7 +516,7 @@ def test_database_factory_explicit(self): self.assertTrue(isinstance(database, Database)) self.assertEqual(database.database_id, DATABASE_ID) - self.assertTrue(database._instance is instance) + self.assertIs(database._instance, instance) self.assertEqual(list(database.ddl_statements), DDL_STATEMENTS) self.assertIs(database._pool, pool) self.assertIs(pool._bound, database) @@ -547,7 +547,7 @@ def test_list_databases_wo_paging(self): instance_name, page_size, options = api._listed_databases self.assertEqual(instance_name, self.INSTANCE_NAME) self.assertEqual(page_size, None) - self.assertTrue(options.page_token is INITIAL_PAGE) + self.assertIs(options.page_token, INITIAL_PAGE) self.assertEqual(options.kwargs['metadata'], [('google-cloud-resource-prefix', instance.name)]) diff --git a/spanner/tests/unit/test_session.py b/spanner/tests/unit/test_session.py index 5f75d471a7cf..ce9f81eccc7a 100644 --- a/spanner/tests/unit/test_session.py +++ b/spanner/tests/unit/test_session.py @@ -39,8 +39,8 @@ def _make_one(self, *args, **kwargs): def test_constructor(self): database = _Database(self.DATABASE_NAME) session = self._make_one(database) - self.assertTrue(session.session_id is None) - self.assertTrue(session._database is database) + self.assertIs(session.session_id, None) + self.assertIs(session._database, database) def test___lt___(self): database = _Database(self.DATABASE_NAME) @@ -223,7 +223,7 @@ def test_snapshot_created(self): snapshot = session.snapshot() self.assertIsInstance(snapshot, Snapshot) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertTrue(snapshot._strong) def test_read_not_created(self): @@ -352,7 +352,7 @@ def test_batch_created(self): batch = session.batch() self.assertIsInstance(batch, Batch) - self.assertTrue(batch._session is session) + self.assertIs(batch._session, session) def test_transaction_not_created(self): database = _Database(self.DATABASE_NAME) @@ -371,8 +371,8 @@ def test_transaction_created(self): transaction = session.transaction() self.assertIsInstance(transaction, Transaction) - self.assertTrue(transaction._session is session) - self.assertTrue(session._transaction is transaction) + self.assertIs(transaction._session, session) + self.assertIs(session._transaction, transaction) def test_transaction_w_existing_txn(self): database = _Database(self.DATABASE_NAME) @@ -382,7 +382,7 @@ def test_transaction_w_existing_txn(self): existing = session.transaction() another = session.transaction() # invalidates existing txn - self.assertTrue(session._transaction is another) + self.assertIs(session._transaction, another) self.assertTrue(existing._rolled_back) def test_retry_transaction_w_commit_error_txn_already_begun(self): diff --git a/spanner/tests/unit/test_snapshot.py b/spanner/tests/unit/test_snapshot.py index cf1abce94f45..c5213dbd6cda 100644 --- a/spanner/tests/unit/test_snapshot.py +++ b/spanner/tests/unit/test_snapshot.py @@ -66,7 +66,7 @@ def 
_make_txn_selector(self): def test_ctor(self): session = _Session() base = self._make_one(session) - self.assertTrue(base._session is session) + self.assertIs(base._session, session) def test__make_txn_selector_virtual(self): session = _Session() @@ -320,7 +320,7 @@ def _makeDuration(self, seconds=1, microseconds=0): def test_ctor_defaults(self): session = _Session() snapshot = self._make_one(session) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertTrue(snapshot._strong) self.assertIsNone(snapshot._read_timestamp) self.assertIsNone(snapshot._min_read_timestamp) @@ -340,7 +340,7 @@ def test_ctor_w_read_timestamp(self): timestamp = self._makeTimestamp() session = _Session() snapshot = self._make_one(session, read_timestamp=timestamp) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertFalse(snapshot._strong) self.assertEqual(snapshot._read_timestamp, timestamp) self.assertIsNone(snapshot._min_read_timestamp) @@ -351,7 +351,7 @@ def test_ctor_w_min_read_timestamp(self): timestamp = self._makeTimestamp() session = _Session() snapshot = self._make_one(session, min_read_timestamp=timestamp) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertFalse(snapshot._strong) self.assertIsNone(snapshot._read_timestamp) self.assertEqual(snapshot._min_read_timestamp, timestamp) @@ -362,7 +362,7 @@ def test_ctor_w_max_staleness(self): duration = self._makeDuration() session = _Session() snapshot = self._make_one(session, max_staleness=duration) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertFalse(snapshot._strong) self.assertIsNone(snapshot._read_timestamp) self.assertIsNone(snapshot._min_read_timestamp) @@ -373,7 +373,7 @@ def test_ctor_w_exact_staleness(self): duration = self._makeDuration() session = _Session() snapshot = self._make_one(session, exact_staleness=duration) - self.assertTrue(snapshot._session is session) + self.assertIs(snapshot._session, session) self.assertFalse(snapshot._strong) self.assertIsNone(snapshot._read_timestamp) self.assertIsNone(snapshot._min_read_timestamp) diff --git a/spanner/tests/unit/test_streamed.py b/spanner/tests/unit/test_streamed.py index 3300e4048cc7..edcace273f66 100644 --- a/spanner/tests/unit/test_streamed.py +++ b/spanner/tests/unit/test_streamed.py @@ -561,7 +561,7 @@ def test_consume_next_first_set_partial(self): streamed.consume_next() self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertTrue(streamed.metadata is metadata) + self.assertIs(streamed.metadata, metadata) self.assertEqual(streamed.resume_token, result_set.resume_token) def test_consume_next_w_partial_result(self): @@ -630,7 +630,7 @@ def test_consume_next_last_set(self): streamed.consume_next() self.assertEqual(streamed.rows, [BARE]) self.assertEqual(streamed._current_row, []) - self.assertTrue(streamed._stats is stats) + self.assertIs(streamed._stats, stats) self.assertEqual(streamed.resume_token, result_set.resume_token) def test_consume_all_empty(self): @@ -653,7 +653,7 @@ def test_consume_all_one_result_set_partial(self): streamed.consume_all() self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertTrue(streamed.metadata is metadata) + self.assertIs(streamed.metadata, metadata) def test_consume_all_multiple_result_sets_filled(self): FIELDS = [ @@ -703,7 +703,7 @@ def 
test___iter___one_result_set_partial(self): self.assertEqual(found, []) self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertTrue(streamed.metadata is metadata) + self.assertIs(streamed.metadata, metadata) def test___iter___multiple_result_sets_filled(self): FIELDS = [ diff --git a/spanner/tests/unit/test_transaction.py b/spanner/tests/unit/test_transaction.py index bdb8d20b8f01..997f4d5153c8 100644 --- a/spanner/tests/unit/test_transaction.py +++ b/spanner/tests/unit/test_transaction.py @@ -48,7 +48,7 @@ def _make_one(self, *args, **kwargs): def test_ctor_defaults(self): session = _Session() transaction = self._make_one(session) - self.assertTrue(transaction._session is session) + self.assertIs(transaction._session, session) self.assertIsNone(transaction._id) self.assertIsNone(transaction.committed) self.assertEqual(transaction._rolled_back, False) diff --git a/speech/tests/unit/test_client.py b/speech/tests/unit/test_client.py index 259df66b0a3d..b66d3080e066 100644 --- a/speech/tests/unit/test_client.py +++ b/speech/tests/unit/test_client.py @@ -88,8 +88,8 @@ def test_ctor(self): creds = _make_credentials() http = object() client = self._make_one(credentials=creds, _http=http) - self.assertTrue(client._credentials is creds) - self.assertTrue(client._http is http) + self.assertIs(client._credentials, creds) + self.assertIs(client._http, http) def test_ctor_use_grpc_preset(self): creds = _make_credentials() From 6af2c4ff76ed3fbc5061d5d888c9e7355351915e Mon Sep 17 00:00:00 2001 From: florencep Date: Wed, 19 Jul 2017 14:58:03 -0700 Subject: [PATCH 27/62] update documentation link (#3633) since it moved to a new URL https://googlecloudplatform.github.io/google-cloud-python/stable/speech/usage.html --- speech/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/speech/README.rst b/speech/README.rst index aeec14e484a3..663555b52db3 100644 --- a/speech/README.rst +++ b/speech/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Speech - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech/usage.html Quick Start ----------- From 957e4dd4d41b8ac485ed29951db955c00b8b1aa4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 19 Jul 2017 14:58:17 -0700 Subject: [PATCH 28/62] Fixing references to "dead" docs links. (#3631) * Fixing references to "dead" docs links. Done via: $ git grep -l 'google-cloud-auth.html' | \ > xargs sed -i s/'google-cloud-auth.html'/'core\/auth.html'/g $ git grep -l 'http\:\/\/google-cloud-python.readthedocs.io' | \ > xargs sed -i s/'http\:\/\/google-cloud-python.readthedocs.io'/\ > 'https\:\/\/google-cloud-python.readthedocs.io'/g Fixes #3531. * Fixing up other docs that were moved in #3459. 
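For readers who want the same bulk rewrite without shelling out, here is a rough
Python equivalent of the `git grep`/`sed` pipeline quoted above. It is a sketch
only: the URL pair mirrors this commit, but the repository root and the
`**/*.rst` glob are illustrative assumptions (the real commit also touched
`.py` files).

    # Sketch: bulk-rewrite stale documentation links, mirroring the
    # `git grep -l ... | xargs sed -i` pipeline in the commit message.
    import pathlib

    OLD = 'http://google-cloud-python.readthedocs.io'
    NEW = 'https://google-cloud-python.readthedocs.io'

    # Assumption: run from the repository root; extend the glob as needed.
    for path in pathlib.Path('.').glob('**/*.rst'):
        text = path.read_text(encoding='utf-8')
        if OLD in text:
            path.write_text(text.replace(OLD, NEW), encoding='utf-8')

Unlike `sed -i`, this rewrites a file only when it actually contains a match,
leaving every other file untouched.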
--- bigquery/README.rst | 4 ++-- bigtable/README.rst | 4 ++-- core/README.rst | 2 +- core/google/cloud/credentials.py | 2 +- datastore/README.rst | 6 +++--- dns/README.rst | 4 ++-- error_reporting/README.rst | 4 ++-- language/README.rst | 4 ++-- logging/README.rst | 6 +++--- monitoring/README.rst | 6 +++--- pubsub/README.rst | 6 +++--- resource_manager/README.rst | 6 +++--- runtimeconfig/README.rst | 2 +- spanner/README.rst | 2 +- speech/README.rst | 4 ++-- storage/README.rst | 6 +++--- storage/google/cloud/storage/bucket.py | 2 +- translate/README.rst | 2 +- videointelligence/README.rst | 6 +----- vision/README.rst | 4 ++-- 20 files changed, 39 insertions(+), 43 deletions(-) diff --git a/bigquery/README.rst b/bigquery/README.rst index 97a94366a49a..bf5bc55f1fa4 100644 --- a/bigquery/README.rst +++ b/bigquery/README.rst @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -86,7 +86,7 @@ Perform a synchronous query See the ``google-cloud-python`` API `BigQuery documentation`_ to learn how to connect to BigQuery using this Client Library. -.. _BigQuery documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery-usage.html +.. _BigQuery documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery.svg :target: https://pypi.python.org/pypi/google-cloud-bigquery diff --git a/bigtable/README.rst b/bigtable/README.rst index 3b37f5ec6880..3385b882c28f 100644 --- a/bigtable/README.rst +++ b/bigtable/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Bigtable - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigtable-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigtable/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API diff --git a/core/README.rst b/core/README.rst index 5088505addc7..e9e7e19278ce 100644 --- a/core/README.rst +++ b/core/README.rst @@ -9,7 +9,7 @@ used by all of the ``google-cloud-*``. - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/google-cloud-api.html +.. 
_Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/core/modules.html Quick Start ----------- diff --git a/core/google/cloud/credentials.py b/core/google/cloud/credentials.py index 6a1bf512f7a9..e5fe30245ea5 100644 --- a/core/google/cloud/credentials.py +++ b/core/google/cloud/credentials.py @@ -60,7 +60,7 @@ def _get_signed_query_params(credentials, expiration, string_to_sign): signed payload. """ if not isinstance(credentials, google.auth.credentials.Signing): - auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/' + auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' 'core/auth.html?highlight=authentication#setting-up-' 'a-service-account') raise AttributeError('you need a private key to sign credentials.' diff --git a/datastore/README.rst b/datastore/README.rst index d913abc7821f..dbfc252564ea 100644 --- a/datastore/README.rst +++ b/datastore/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Datastore - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/client.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -44,7 +44,7 @@ queries, and eventual consistency for all other queries. See the ``google-cloud-python`` API `datastore documentation`_ to learn how to interact with the Cloud Datastore using this Client Library. -.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore-client.html +.. _datastore documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/datastore/client.html See the `official Google Cloud Datastore documentation`_ for more details on how to activate Cloud Datastore for your project. diff --git a/dns/README.rst b/dns/README.rst index 2e290780b3ed..7f46dce1d617 100644 --- a/dns/README.rst +++ b/dns/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud DNS - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/dns-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/dns/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. 
_authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API diff --git a/error_reporting/README.rst b/error_reporting/README.rst index e968d42754bf..69308b1ce0e2 100644 --- a/error_reporting/README.rst +++ b/error_reporting/README.rst @@ -9,7 +9,7 @@ Python Client for Stackdriver Error Reporting - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/error-reporting-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/error-reporting/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API diff --git a/language/README.rst b/language/README.rst index 5d09b6de0dc6..8685c8925313 100644 --- a/language/README.rst +++ b/language/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Natural Language - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/language-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/language/usage.html Quick Start ----------- @@ -28,7 +28,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API diff --git a/logging/README.rst b/logging/README.rst index 5df19dd1f79a..a706b50079ac 100644 --- a/logging/README.rst +++ b/logging/README.rst @@ -9,7 +9,7 @@ Python Client for Stackdriver Logging - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -54,7 +54,7 @@ Example of fetching entries: See the ``google-cloud-python`` API `logging documentation`_ to learn how to connect to Stackdriver Logging using this Client Library. -.. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging-usage.html +.. 
_logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg :target: https://pypi.python.org/pypi/google-cloud-logging diff --git a/monitoring/README.rst b/monitoring/README.rst index 6c4889fb3925..4debab64a3ea 100644 --- a/monitoring/README.rst +++ b/monitoring/README.rst @@ -9,7 +9,7 @@ Python Client for Stackdriver Monitoring - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -64,7 +64,7 @@ Display CPU utilization across your GCE instances during the last five minutes: See the ``google-cloud-python`` API `monitoring documentation`_ to learn how to connect to Stackdriver Monitoring using this Client Library. -.. _monitoring documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring-usage.html +.. _monitoring documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-monitoring.svg :target: https://pypi.python.org/pypi/google-cloud-monitoring diff --git a/pubsub/README.rst b/pubsub/README.rst index 6bf9d77ee82e..472b74eb1bf0 100644 --- a/pubsub/README.rst +++ b/pubsub/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Pub / Sub - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -45,7 +45,7 @@ independently written applications. See the ``google-cloud-python`` API `Pub/Sub documentation`_ to learn how to connect to Cloud Pub/Sub using this Client Library. -.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html +.. 
_Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub/usage.html To get started with this API, you'll need to create diff --git a/resource_manager/README.rst b/resource_manager/README.rst index 6d7482690273..f0e67ca4750e 100644 --- a/resource_manager/README.rst +++ b/resource_manager/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Resource Manager - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager-api.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager/api.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -42,7 +42,7 @@ Google Cloud Platform. See the ``google-cloud-python`` API `Resource Manager documentation`_ to learn how to manage projects using this Client Library. -.. _Resource Manager documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager-api.html +.. _Resource Manager documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager/api.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-resource-manager.svg :target: https://pypi.python.org/pypi/google-cloud-resource-manager diff --git a/runtimeconfig/README.rst b/runtimeconfig/README.rst index dcf71476fd41..b8d79b3fd3d3 100644 --- a/runtimeconfig/README.rst +++ b/runtimeconfig/README.rst @@ -30,7 +30,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API diff --git a/spanner/README.rst b/spanner/README.rst index fedabfb50fef..1580c27a71a0 100644 --- a/spanner/README.rst +++ b/spanner/README.rst @@ -3,7 +3,7 @@ Python Client for Cloud Spanner Python idiomatic client for `Cloud Spanner`_. -.. _Cloud Spanner: https://googlecloudplatform.github.io/google-cloud-python/latest/spanner-usage.html +.. _Cloud Spanner: https://googlecloudplatform.github.io/google-cloud-python/latest/spanner/usage.html Quick Start diff --git a/speech/README.rst b/speech/README.rst index 663555b52db3..ce67559f09e7 100644 --- a/speech/README.rst +++ b/speech/README.rst @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. 
_authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -41,7 +41,7 @@ and receive a text transcription from the Cloud Speech API service. See the ``google-cloud-python`` API `speech documentation`_ to learn how to connect to the Google Cloud Speech API using this Client Library. -.. _speech documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech-usage.html +.. _speech documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-speech.svg :target: https://pypi.python.org/pypi/google-cloud-speech .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-speech.svg diff --git a/storage/README.rst b/storage/README.rst index 6d55686be9d0..553c377a2be3 100644 --- a/storage/README.rst +++ b/storage/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Storage - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/storage-client.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/storage/client.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -43,7 +43,7 @@ via direct download. See the ``google-cloud-python`` API `storage documentation`_ to learn how to connect to Cloud Storage using this Client Library. -.. _storage documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/storage-client.html +.. _storage documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/storage/client.html You need to create a Google Cloud Storage bucket to use this client library. Follow along with the `official Google Cloud Storage documentation`_ to learn diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py index 895a6e38473f..02fa076e9e60 100644 --- a/storage/google/cloud/storage/bucket.py +++ b/storage/google/cloud/storage/bucket.py @@ -1015,7 +1015,7 @@ def generate_upload_policy( credentials = client._base_connection.credentials if not isinstance(credentials, google.auth.credentials.Signing): - auth_uri = ('http://google-cloud-python.readthedocs.io/en/latest/' + auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' 'core/auth.html?highlight=authentication#setting-up-' 'a-service-account') raise AttributeError( diff --git a/translate/README.rst b/translate/README.rst index 47ecc3b553d2..7e56d081cf46 100644 --- a/translate/README.rst +++ b/translate/README.rst @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. 
_authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API diff --git a/videointelligence/README.rst b/videointelligence/README.rst index d3741cd88fc1..e294919b77be 100644 --- a/videointelligence/README.rst +++ b/videointelligence/README.rst @@ -7,10 +7,6 @@ Python Client for Google Cloud Video Intelligence |pypi| |versions| -- `Documentation`_ - -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/video-intelligence-usage.html - Quick Start ----------- @@ -26,7 +22,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API diff --git a/vision/README.rst b/vision/README.rst index f8b5adc2a0b9..0056d714dd5c 100644 --- a/vision/README.rst +++ b/vision/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Vision - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/vision-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/vision/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API From a6791695c01dffd9877aabb5a84debbd45ac5130 Mon Sep 17 00:00:00 2001 From: Till Hoffmann Date: Thu, 20 Jul 2017 00:23:45 +0100 Subject: [PATCH 29/62] Add keyword arguments to google.cloud.storage.Bucket.get_blob. (#3613) --- storage/google/cloud/storage/blob.py | 2 +- storage/google/cloud/storage/bucket.py | 20 +++++++++++++++++--- storage/tests/unit/test_bucket.py | 23 +++++++++++++++++++++++ 3 files changed, 41 insertions(+), 4 deletions(-) diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py index de59fdf1f2bd..7d967a3e4901 100644 --- a/storage/google/cloud/storage/blob.py +++ b/storage/google/cloud/storage/blob.py @@ -113,7 +113,7 @@ class Blob(_PropertyMixin): :type encryption_key: bytes :param encryption_key: Optional 32 byte encryption key for customer-supplied encryption. - See https://cloud.google.com/storage/docs/encryption#customer-supplied + See https://cloud.google.com/storage/docs/encryption#customer-supplied. """ _chunk_size = None # Default value for each instance. 
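Together with the `bucket.py` hunk below, the new keyword arguments let a
caller fetch a customer-encrypted blob in a single call. A minimal sketch,
where the bucket name, blob name, and key value are illustrative examples,
not part of the patch:

    # Sketch: using the new get_blob() keyword arguments from this patch.
    from google.cloud import storage

    client = storage.Client()
    bucket = client.get_bucket('my-bucket')    # illustrative bucket name

    key = b'01234567890123456789012345678901'  # example 32-byte CSEK
    blob = bucket.get_blob(
        'my-test-file.txt',                    # illustrative blob name
        encryption_key=key,                    # sent as encryption headers
        chunk_size=1024 * 1024)                # forwarded to Blob(**kwargs)
    if blob is not None:                       # get_blob() returns None on miss
        print(blob.download_as_string())

The `chunk_size` keyword rides through `**kwargs` to the `Blob` constructor,
which is exactly what `test_get_blob_hit_with_kwargs` below verifies.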
diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py index 02fa076e9e60..35ba59337490 100644 --- a/storage/google/cloud/storage/bucket.py +++ b/storage/google/cloud/storage/bucket.py @@ -34,6 +34,7 @@ from google.cloud.storage.acl import BucketACL from google.cloud.storage.acl import DefaultObjectACL from google.cloud.storage.blob import Blob +from google.cloud.storage.blob import _get_encryption_headers def _blobs_page_start(iterator, page, response): @@ -228,7 +229,7 @@ def path(self): return self.path_helper(self.name) - def get_blob(self, blob_name, client=None): + def get_blob(self, blob_name, client=None, encryption_key=None, **kwargs): """Get a blob object by name. This will return None if the blob doesn't exist: @@ -245,14 +246,27 @@ def get_blob(self, blob_name, client=None): :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the current bucket. + :type encryption_key: bytes + :param encryption_key: + Optional 32 byte encryption key for customer-supplied encryption. + See + https://cloud.google.com/storage/docs/encryption#customer-supplied. + + :type kwargs: dict + :param kwargs: Keyword arguments to pass to the + :class:`~google.cloud.storage.blob.Blob` constructor. + :rtype: :class:`google.cloud.storage.blob.Blob` or None :returns: The blob object if it exists, otherwise None. """ client = self._require_client(client) - blob = Blob(bucket=self, name=blob_name) + blob = Blob(bucket=self, name=blob_name, encryption_key=encryption_key, + **kwargs) try: + headers = _get_encryption_headers(encryption_key) response = client._connection.api_request( - method='GET', path=blob.path, _target_object=blob) + method='GET', path=blob.path, _target_object=blob, + headers=headers) # NOTE: We assume response.get('name') matches `blob_name`. blob._set_properties(response) # NOTE: This will not fail immediately in a batch. However, when diff --git a/storage/tests/unit/test_bucket.py b/storage/tests/unit/test_bucket.py index 5e4a91575197..0df94dc5db3d 100644 --- a/storage/tests/unit/test_bucket.py +++ b/storage/tests/unit/test_bucket.py @@ -245,6 +245,29 @@ def test_get_blob_hit(self): self.assertEqual(kw['method'], 'GET') self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) + def test_get_blob_hit_with_kwargs(self): + from google.cloud.storage.blob import _get_encryption_headers + + NAME = 'name' + BLOB_NAME = 'blob-name' + CHUNK_SIZE = 1024 * 1024 + KEY = b'01234567890123456789012345678901' # 32 bytes + + connection = _Connection({'name': BLOB_NAME}) + client = _Client(connection) + bucket = self._make_one(name=NAME) + blob = bucket.get_blob( + BLOB_NAME, client=client, encryption_key=KEY, chunk_size=CHUNK_SIZE + ) + self.assertIs(blob.bucket, bucket) + self.assertEqual(blob.name, BLOB_NAME) + kw, = connection._requested + self.assertEqual(kw['method'], 'GET') + self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME)) + self.assertEqual(kw['headers'], _get_encryption_headers(KEY)) + self.assertEqual(blob.chunk_size, CHUNK_SIZE) + self.assertEqual(blob._encryption_key, KEY) + def test_list_blobs_defaults(self): NAME = 'name' connection = _Connection({'items': []}) From 025105e1de179aa2112527816a9f11455e8b7ede Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Jul 2017 09:07:02 -0700 Subject: [PATCH 30/62] Removing JSON docs and unused images. 
(#3640)
---
 docs/_static/images/gcp-logo-32x32.png |  Bin 1977 -> 0 bytes
 docs/_static/images/gcp-logo.png       |  Bin 2334 -> 0 bytes
 docs/json/json/home.html               |  127 -------
 docs/json/json/master/index.json       |   10 -
 docs/json/json/master/overview.html    |   46 ---
 docs/json/json/master/toc.json         |  451 ------
 docs/json/manifest.json                |   20 --
 7 files changed, 654 deletions(-)
 delete mode 100644 docs/_static/images/gcp-logo-32x32.png
 delete mode 100644 docs/_static/images/gcp-logo.png
 delete mode 100644 docs/json/json/home.html
 delete mode 100644 docs/json/json/master/index.json
 delete mode 100644 docs/json/json/master/overview.html
 delete mode 100644 docs/json/json/master/toc.json
 delete mode 100644 docs/json/manifest.json

diff --git a/docs/_static/images/gcp-logo-32x32.png b/docs/_static/images/gcp-logo-32x32.png
deleted file mode 100644
index 1dcafb042990c31f255a7a5a0cfdcdd388ed0e3a..0000000000000000000000000000000000000000
GIT binary patch
[base85 payload for the deleted 1977-byte PNG omitted]

diff --git a/docs/_static/images/gcp-logo.png b/docs/_static/images/gcp-logo.png
deleted file mode 100644
GIT binary patch
[base85 payload for the deleted 2334-byte PNG omitted]

diff --git a/docs/json/json/home.html b/docs/json/json/home.html
deleted file mode 100644
index 762625468340..000000000000
--- a/docs/json/json/home.html
+++ /dev/null
@@ -1,127 +0,0 @@
-
-
-
-

google-cloud-python

-

Google Cloud Client Library for Python - - an idiomatic, intuitive, and natural way for Python developers to - integrate with Google Cloud Platform services, like Cloud Datastore - and Cloud Storage.

-
- -
-
$ pip install --upgrade google-cloud
-

- Latest Release {{home.latestRelease.name}} - {{home.latestRelease.date|date}} -

-
-
-
- -
- -
- -
-
-
-

What is it?

- -

google-cloud-python is a client library for accessing Google - Cloud Platform services that significantly reduces the boilerplate - code you have to write. The library provides high-level API - abstractions so they're easier to understand. It embraces - idioms of Python, works well with the standard library, and - integrates better with your codebase. - All this means you spend more time creating code that matters - to you.

- -

google-cloud-python is configured to access Google Cloud Platform - services and authorize (OAuth 2.0) automatically on your behalf. - With a one-line install and a private key, you are up and ready - to go. Better yet, if you are running on a Google Compute Engine - instance, the one-line install is enough!

- -
- -
-

Retrieve Datastore Entities

-
from google.cloud import datastore
-
-client = datastore.Client()
-product_key = client.key('Product', 123)
-print(client.get(product_key))
-
-
-
- -
-
-

Examples

- - -
-
- -
-
-

FAQ

- -

What is the relationship between the google-cloud-python package - and the google-cloud command-line tool?

-

Both the google-cloud command-line tool and - google-cloud-python package are a part of the Google Cloud SDK: a collection - of tools and libraries that enable you to easily create and manage - resources on the Google Cloud Platform. The google-cloud command-line - tool can be used to manage both your development workflow and your - Google Cloud Platform resources while the google-cloud-python package is the - Google Cloud Client Library for Python.

- -

What is the relationship between google-cloud-python - and the Google APIs Python Client?

-

The - Google APIs Python Client is a client library for - using the broad set of Google APIs. - google-cloud-python is built specifically for the Google Cloud Platform - and is the recommended way to integrate Google Cloud APIs into your - Python applications. If your application requires both Google Cloud Platform and - other Google APIs, the 2 libraries may be used by your application.

-
-
diff --git a/docs/json/json/master/index.json b/docs/json/json/master/index.json deleted file mode 100644 index 1526595856f1..000000000000 --- a/docs/json/json/master/index.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "description": "

google-cloud-python

Google Cloud API access in idiomatic Python.

", - "examples": [ - "import google.cloud" - ], - "id": "google.cloud.__init__", - "methods": [], - "name": "__Init__", - "source": "/google/cloud/__init__.py" -} diff --git a/docs/json/json/master/overview.html b/docs/json/json/master/overview.html deleted file mode 100644 index 08d4b32fabe7..000000000000 --- a/docs/json/json/master/overview.html +++ /dev/null @@ -1,46 +0,0 @@ -

Getting started

- -

-The google-cloud library is pip install-able: -

- -
- $ pip install google-cloud -
- -
- -

Cloud Datastore

- -

-Google Cloud Datastore is a fully managed, - schemaless database for storing non-relational data. -

- -
- - from google.cloud import datastore - - client = datastore.Client() - key = client.key('Person') - - entity = datastore.Entity(key=key) - entity['name'] = 'Your name' - entity['age'] = 25 - client.put(entity) -
- -

Cloud Storage

- -

-Google Cloud Storage allows you to store data on Google infrastructure. -

- -
- from google.cloud import storage - - client = storage.Client() - bucket = client.get_bucket('') - blob = bucket.blob('my-test-file.txt') - blob.upload_from_string('this is test content!') -
diff --git a/docs/json/json/master/toc.json b/docs/json/json/master/toc.json deleted file mode 100644 index 2292459a133d..000000000000 --- a/docs/json/json/master/toc.json +++ /dev/null @@ -1,451 +0,0 @@ -{ - "guides": [ - { - "contents": [ - "https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/authentication/readme.md", - "authentication.md" - ], - "edit": "https://github.com/GoogleCloudPlatform/gcloud-common/edit/master/authentication/readme.md", - "id": "authentication", - "title": "Authentication" - }, - { - "contents": "https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/faq/readme.md", - "edit": "https://github.com/GoogleCloudPlatform/gcloud-common/edit/master/faq/readme.md", - "id": "faq", - "title": "FAQ" - }, - { - "contents": "https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/troubleshooting/readme.md", - "edit": "https://github.com/GoogleCloudPlatform/gcloud-common/edit/master/troubleshooting/readme.md", - "id": "troubleshooting", - "title": "Troubleshooting" - }, - { - "contents": "https://raw.githubusercontent.com/GoogleCloudPlatform/gcloud-common/master/contributing/readme.md", - "edit": "https://github.com/GoogleCloudPlatform/gcloud-common/edit/master/contributing/readme.md", - "id": "contributing", - "title": "Contributing" - } - ], - "overview": "overview.html", - "services": [ - { - "nav": [ - { - "title": "Client", - "type": "google/cloud/client/client" - }, - { - "title": "Connection", - "type": "google/cloud/connection/connection" - }, - { - "title": "Credentials", - "type": "google/cloud/credentials" - }, - { - "title": "Environment Variables", - "type": "google/cloud/environment_vars" - }, - { - "title": "Iterator", - "type": "google/cloud/iterator" - } - ], - "title": "gcloud", - "type": "google/cloud/gcloud" - }, - { - "implemented": ">=0.7.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/bigquery/client/client" - }, - { - "title": "Dataset", - "type": "google/cloud/bigquery/dataset/dataset" - }, - { - "title": "Jobs", - "type": "google/cloud/bigquery/job" - }, - { - "title": "Table", - "type": "google/cloud/bigquery/table/table" - }, - { - "title": "Query", - "type": "google/cloud/bigquery/query/query" - } - ], - "title": "BigQuery", - "type": "google/cloud/bigquery" - }, - { - "implemented": ">=0.12.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/bigtable/client/client" - }, - { - "title": "Cluster", - "type": "google/cloud/bigtable/cluster/cluster" - }, - { - "title": "Column Families", - "type": "google/cloud/bigtable/column_family/columnfamily" - }, - { - "title": "Bigtable Row", - "type": "google/cloud/bigtable/row/row" - }, - { - "title": "Bigtable Row Filter", - "type": "google/cloud/bigtable/row_filters/rowfilter" - }, - { - "title": "Table", - "type": "google/cloud/bigtable/table/table" - } - ], - "title": "BigTable", - "type": "google/cloud/bigtable" - }, - { - "implemented": ">=0.3.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/datastore/client/client" - }, - { - "title": "Entity", - "type": "google/cloud/datastore/entity/entity" - }, - { - "title": "Key", - "type": "google/cloud/datastore/key/key" - }, - { - "title": "Query", - "type": "google/cloud/datastore/query/query" - }, - { - "title": "Transaction", - "type": "google/cloud/datastore/transaction/transaction" - }, - { - "title": "Batch", - "type": "google/cloud/datastore/batch/batch" - }, - { - "title": "Helpers", - "type": "datastore/helpers" - } - ], - "title": "Datastore", - 
"type": "google/cloud/datastore" - }, - { - "implemented": ">=0.8.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/dns/client/client" - }, - { - "title": "Managed Zones", - "type": "google/cloud/dns/zone/managedzone" - }, - { - "title": "Resource Record Set", - "type": "google/cloud/dns/resource_record_set/resourcerecordset" - }, - { - "title": "Change Sets", - "type": "google/cloud/dns/changes/changes" - } - ], - "title": "DNS", - "type": "google/cloud/dns" - }, - { - "nav": [ - { - "title": "Client", - "type": "google/cloud/error_reporting/client/client" - } - ], - "title": "Error Reporting", - "type": "google/cloud/error_reporting" - }, - { - "nav": [ - { - "title": "BadRequest", - "type": "google/cloud/exceptions/badrequest" - }, - { - "title": "ClientError", - "type": "google/cloud/exceptions/clienterror" - }, - { - "title": "Conflict", - "type": "google/cloud/exceptions/conflict" - }, - { - "title": "Forbidden", - "type": "google/cloud/exceptions/forbidden" - }, - { - "title": "GoogleCloudError", - "type": "google/cloud/exceptions/gclouderror" - }, - { - "title": "InternalServerError", - "type": "google/cloud/exceptions/internalservererror" - }, - { - "title": "LengthRequired", - "type": "google/cloud/exceptions/lengthrequired" - }, - { - "title": "MethodNotAllowed", - "type": "google/cloud/exceptions/methodnotallowed" - }, - { - "title": "MethodNotImplemented", - "type": "google/cloud/exceptions/methodnotimplemented" - }, - { - "title": "MovedPermanently", - "type": "google/cloud/exceptions/movedpermanently" - }, - { - "title": "NotFound", - "type": "google/cloud/exceptions/notfound" - }, - { - "title": "NotModified", - "type": "google/cloud/exceptions/notmodified" - }, - { - "title": "PreconditionFailed", - "type": "google/cloud/exceptions/preconditionfailed" - }, - { - "title": "Redirection", - "type": "google/cloud/exceptions/redirection" - }, - { - "title": "RequestRangeNotSatisfiable", - "type": "google/cloud/exceptions/requestrangenotsatisfiable" - }, - { - "title": "ResumeIncomplete", - "type": "google/cloud/exceptions/resumeincomplete" - }, - { - "title": "ServerError", - "type": "google/cloud/exceptions/servererror" - }, - { - "title": "ServiceUnavailable", - "type": "google/cloud/exceptions/serviceunavailable" - }, - { - "title": "TemporaryRedirect", - "type": "google/cloud/exceptions/temporaryredirect" - }, - { - "title": "TooManyRequests", - "type": "google/cloud/exceptions/toomanyrequests" - }, - { - "title": "Unauthorized", - "type": "google/cloud/exceptions/unauthorized" - }, - { - "title": "Exceptions", - "type": "google/cloud/exceptions" - } - ], - "title": "Exceptions", - "type": "google/cloud/exceptions" - }, - { - "implemented": ">=0.12.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/logging/client/client" - }, - { - "title": "Logger", - "type": "google/cloud/logging/logger/logger" - }, - { - "title": "Entries", - "type": "google/cloud/logging/entries" - }, - { - "title": "Metric", - "type": "google/cloud/logging/metric/metric" - }, - { - "title": "Sink", - "type": "google/cloud/logging/sink/sink" - } - ], - "title": "Cloud Logging", - "type": "google/cloud/logging" - }, - { - "implemented": ">=0.12.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/monitoring/client/client" - }, - { - "title": "Metric Descriptors", - "type": "google/cloud/monitoring/metric/metricdescriptor" - }, - { - "title": "Label Descriptors", - "type": "google/cloud/monitoring/label/labeldescriptor" - }, - { - "title": "Monitored Resource 
Descriptors", - "type": "google/cloud/monitoring/resource/resourcedescriptor" - }, - { - "title": "Time Series Query", - "type": "google/cloud/monitoring/query/query" - }, - { - "title": "Time Series", - "type": "googe/cloud/monitoring/timeseries/timeseries" - } - ], - "title": "Cloud Monitoring", - "type": "google/cloud/monitoring" - }, - { - "implemented": ">=0.5.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/pubsub/client/client" - }, - { - "title": "Topic", - "type": "google/cloud/pubsub/topic/topic" - }, - { - "title": "Subscription", - "type": "google/cloud/pubsub/subscription/subscription" - }, - { - "title": "Message", - "type": "google/cloud/pubsub/message/message" - }, - { - "title": "Policy", - "type": "google/cloud/pubsub/iam/policy" - } - ], - "title": "Pub/Sub", - "type": "google/cloud/pubsub" - }, - { - "implemented": ">=0.8.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/resource_manager/client/client" - }, - { - "title": "Project", - "type": "google/cloud/resource_manager/project/project" - } - ], - "title": "Resource Manager", - "type": "google/cloud/resource_manager" - }, - { - "implemented": ">=0.3.0", - "nav": [ - { - "title": "Client", - "type": "google/cloud/storage/client/client" - }, - { - "title": "Blob", - "type": "google/cloud/storage/blob/blob" - }, - { - "title": "Bucket", - "type": "google/cloud/storage/bucket/bucket" - }, - { - "title": "ACL", - "type": "google/cloud/storage/acl/acl" - }, - { - "title": "Batch", - "type": "google/cloud/storage/batch/batch" - } - ], - "title": "Storage", - "type": "google/cloud/storage" - }, - { - "nav": [ - { - "title": "Buffered Stream", - "type": "google/cloud/streaming/buffered_stream/bufferedstream" - }, - { - "title": "Request", - "type": "google/cloud/streaming/http_wrapper/request" - }, - { - "title": "Response", - "type": "google/cloud/streaming/http_wrapper/response" - }, - { - "title": "Stream Slice", - "type": "google/cloud/streaming/stream_slice/streamslice" - }, - { - "title": "Download", - "type": "google/cloud/streaming/transfer/download" - }, - { - "title": "Upload", - "type": "google/cloud/streaming/transfer/upload" - }, - { - "title": "Util", - "type": "google/cloud/streaming/util" - } - ], - "title": "Streaming", - "type": "google/cloud/streaming" - }, - { - "nav": [ - { - "title": "Connection", - "type": "google/cloud/vision/connection/connection" - } - ], - "title": "Vision", - "type": "google/cloud/vision" - } - ] -} diff --git a/docs/json/manifest.json b/docs/json/manifest.json deleted file mode 100644 index fa41098a95a2..000000000000 --- a/docs/json/manifest.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "content": "json", - "friendlyLang": "Python", - "home": "home.html", - "lang": "python", - "latestRelease": { - "date": 1455306471454, - "link": "https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master", - "name": "master" - }, - "markdown": "python", - "package": { - "href": "https://pypi.python.org/pypi/google-cloud", - "title": "PyPI" - }, - "titleDelimiter": ".", - "versions": [ - "master" - ] -} From 4a94be4a65855d610798a3a8b5809e08985d5e2a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Jul 2017 09:33:21 -0700 Subject: [PATCH 31/62] Changing all pypi.python.org links to warehouse links. (#3641) Done via $ export OLD='https\:\/\/pypi.python.org\/pypi\/' $ export NEW='https\:\/\/pypi.org\/project\/' $ git grep -l ${OLD} | xargs sed -i s/${OLD}/${NEW}/g Then manually going through and adding a trailing slash to all warehouse links. 
(Though I did undo changes to `docs/json/`.) --- CONTRIBUTING.rst | 2 +- README.rst | 34 +++++++++++++++++----------------- bigquery/README.rst | 4 ++-- bigtable/README.rst | 4 ++-- core/README.rst | 4 ++-- datastore/README.rst | 4 ++-- dns/README.rst | 4 ++-- docs/bigtable/usage.rst | 2 +- docs/index.rst | 2 +- docs/spanner/usage.rst | 3 +-- error_reporting/README.rst | 4 ++-- language/README.rst | 4 ++-- logging/README.rst | 4 ++-- monitoring/README.rst | 4 ++-- pubsub/README.rst | 4 ++-- resource_manager/README.rst | 4 ++-- runtimeconfig/README.rst | 4 ++-- speech/README.rst | 4 ++-- storage/README.rst | 4 ++-- translate/README.rst | 4 ++-- vision/README.rst | 4 ++-- 21 files changed, 53 insertions(+), 54 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a9f654a52c37..95a4dd13cfdb 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -402,7 +402,7 @@ instead of ``https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. -.. _description on PyPI: https://pypi.python.org/pypi/google-cloud +.. _description on PyPI: https://pypi.org/project/google-cloud/ ******************************************** Travis Configuration and Build Optimizations diff --git a/README.rst b/README.rst index b2fcb47df468..5e3615da46f9 100644 --- a/README.rst +++ b/README.rst @@ -57,35 +57,35 @@ Cloud Platform services: still a work-in-progress and is more likely to get backwards-incompatible updates. See `versioning`_ for more details. -.. _Google Cloud Datastore: https://pypi.python.org/pypi/google-cloud-datastore +.. _Google Cloud Datastore: https://pypi.org/project/google-cloud-datastore/ .. _Datastore README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/datastore -.. _Google Cloud Storage: https://pypi.python.org/pypi/google-cloud-storage +.. _Google Cloud Storage: https://pypi.org/project/google-cloud-storage/ .. _Storage README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/storage -.. _Google Cloud Pub/Sub: https://pypi.python.org/pypi/google-cloud-pubsub +.. _Google Cloud Pub/Sub: https://pypi.org/project/google-cloud-pubsub/ .. _Pub/Sub README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/pubsub -.. _Google BigQuery: https://pypi.python.org/pypi/google-cloud-bigquery +.. _Google BigQuery: https://pypi.org/project/google-cloud-bigquery/ .. _BigQuery README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/bigquery -.. _Google Cloud Resource Manager: https://pypi.python.org/pypi/google-cloud-resource-manager +.. _Google Cloud Resource Manager: https://pypi.org/project/google-cloud-resource-manager/ .. _Resource Manager README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/resource_manager -.. _Stackdriver Logging: https://pypi.python.org/pypi/google-cloud-logging +.. _Stackdriver Logging: https://pypi.org/project/google-cloud-logging/ .. _Logging README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/logging -.. _Stackdriver Monitoring: https://pypi.python.org/pypi/google-cloud-monitoring +.. _Stackdriver Monitoring: https://pypi.org/project/google-cloud-monitoring/ .. _Monitoring README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/monitoring -.. _Google Cloud Bigtable: https://pypi.python.org/pypi/google-cloud-bigtable +.. _Google Cloud Bigtable: https://pypi.org/project/google-cloud-bigtable/ .. 
_Bigtable README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/bigtable -.. _Google Cloud DNS: https://pypi.python.org/pypi/google-cloud-dns +.. _Google Cloud DNS: https://pypi.org/project/google-cloud-dns/ .. _DNS README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/dns -.. _Stackdriver Error Reporting: https://pypi.python.org/pypi/google-cloud-error-reporting +.. _Stackdriver Error Reporting: https://pypi.org/project/google-cloud-error-reporting/ .. _Error Reporting README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/error_reporting -.. _Google Cloud Natural Language: https://pypi.python.org/pypi/google-cloud-language +.. _Google Cloud Natural Language: https://pypi.org/project/google-cloud-language/ .. _Natural Language README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/language -.. _Google Cloud Translation: https://pypi.python.org/pypi/google-cloud-translate +.. _Google Cloud Translation: https://pypi.org/project/google-cloud-translate/ .. _Translation README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/translate -.. _Google Cloud Speech: https://pypi.python.org/pypi/google-cloud-speech +.. _Google Cloud Speech: https://pypi.org/project/google-cloud-speech/ .. _Speech README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/speech -.. _Google Cloud Vision: https://pypi.python.org/pypi/google-cloud-vision +.. _Google Cloud Vision: https://pypi.org/project/google-cloud-vision/ .. _Vision README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/vision -.. _Google Cloud Bigtable - HappyBase: https://pypi.python.org/pypi/google-cloud-happybase/ +.. _Google Cloud Bigtable - HappyBase: https://pypi.org/project/google-cloud-happybase/ .. _HappyBase README: https://github.com/GoogleCloudPlatform/google-cloud-python-happybase .. _Google Cloud Runtime Configuration: https://cloud.google.com/deployment-manager/runtime-configurator/ .. _Runtime Config README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/runtimeconfig @@ -159,6 +159,6 @@ Apache 2.0 - See `the LICENSE`_ for more information. .. |coverage| image:: https://coveralls.io/repos/GoogleCloudPlatform/google-cloud-python/badge.svg?branch=master :target: https://coveralls.io/r/GoogleCloudPlatform/google-cloud-python?branch=master .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud.svg - :target: https://pypi.python.org/pypi/google-cloud + :target: https://pypi.org/project/google-cloud/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud.svg - :target: https://pypi.python.org/pypi/google-cloud + :target: https://pypi.org/project/google-cloud/ diff --git a/bigquery/README.rst b/bigquery/README.rst index bf5bc55f1fa4..7e4f0cb72dae 100644 --- a/bigquery/README.rst +++ b/bigquery/README.rst @@ -89,6 +89,6 @@ to connect to BigQuery using this Client Library. .. _BigQuery documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/bigquery/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery.svg - :target: https://pypi.python.org/pypi/google-cloud-bigquery + :target: https://pypi.org/project/google-cloud-bigquery/ .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery.svg - :target: https://pypi.python.org/pypi/google-cloud-bigquery + :target: https://pypi.org/project/google-cloud-bigquery/ diff --git a/bigtable/README.rst b/bigtable/README.rst index 3385b882c28f..ebc202d8d87e 100644 --- a/bigtable/README.rst +++ b/bigtable/README.rst @@ -42,6 +42,6 @@ See the ``google-cloud-python`` API Bigtable `Documentation`_ to learn how to manage your data in Bigtable tables. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigtable.svg - :target: https://pypi.python.org/pypi/google-cloud-bigtable + :target: https://pypi.org/project/google-cloud-bigtable/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigtable.svg - :target: https://pypi.python.org/pypi/google-cloud-bigtable + :target: https://pypi.org/project/google-cloud-bigtable/ diff --git a/core/README.rst b/core/README.rst index e9e7e19278ce..53cbd311a50e 100644 --- a/core/README.rst +++ b/core/README.rst @@ -19,6 +19,6 @@ Quick Start $ pip install --upgrade google-cloud-core .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-core.svg - :target: https://pypi.python.org/pypi/google-cloud-core + :target: https://pypi.org/project/google-cloud-core/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-core.svg - :target: https://pypi.python.org/pypi/google-cloud-core + :target: https://pypi.org/project/google-cloud-core/ diff --git a/datastore/README.rst b/datastore/README.rst index dbfc252564ea..89ba561baed3 100644 --- a/datastore/README.rst +++ b/datastore/README.rst @@ -67,6 +67,6 @@ how to activate Cloud Datastore for your project. print(result) .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-datastore.svg - :target: https://pypi.python.org/pypi/google-cloud-datastore + :target: https://pypi.org/project/google-cloud-datastore/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-datastore.svg - :target: https://pypi.python.org/pypi/google-cloud-datastore + :target: https://pypi.org/project/google-cloud-datastore/ diff --git a/dns/README.rst b/dns/README.rst index 7f46dce1d617..e5882fd0adc2 100644 --- a/dns/README.rst +++ b/dns/README.rst @@ -42,6 +42,6 @@ See the ``google-cloud-python`` API DNS `Documentation`_ to learn how to manage DNS records using this Client Library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-dns.svg - :target: https://pypi.python.org/pypi/google-cloud-dns + :target: https://pypi.org/project/google-cloud-dns/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-dns.svg - :target: https://pypi.python.org/pypi/google-cloud-dns + :target: https://pypi.org/project/google-cloud-dns/ diff --git a/docs/bigtable/usage.rst b/docs/bigtable/usage.rst index 421b2426f8cf..f5df56fdee74 100644 --- a/docs/bigtable/usage.rst +++ b/docs/bigtable/usage.rst @@ -40,4 +40,4 @@ In the hierarchy of API concepts .. _Google Cloud Bigtable: https://cloud.google.com/bigtable/docs/ .. _gRPC: http://www.grpc.io/ -.. _grpcio: https://pypi.python.org/pypi/grpcio +.. 
_grpcio: https://pypi.org/project/grpcio/ diff --git a/docs/index.rst b/docs/index.rst index 8c81cefdda2e..ee47a2ac378f 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -73,4 +73,4 @@ Resources * `GitHub `__ * `Issues `__ * `Stack Overflow `__ -* `PyPI `__ +* `PyPI `__ diff --git a/docs/spanner/usage.rst b/docs/spanner/usage.rst index 0d9142041523..2d61fbaed9c7 100644 --- a/docs/spanner/usage.rst +++ b/docs/spanner/usage.rst @@ -40,5 +40,4 @@ In the hierarchy of API concepts .. _Cloud Spanner: https://cloud.google.com/spanner/docs/ .. _gRPC: http://www.grpc.io/ -.. _grpcio: https://pypi.python.org/pypi/grpcio - +.. _grpcio: https://pypi.org/project/grpcio/ diff --git a/error_reporting/README.rst b/error_reporting/README.rst index 69308b1ce0e2..104856f348e6 100644 --- a/error_reporting/README.rst +++ b/error_reporting/README.rst @@ -47,6 +47,6 @@ See the ``google-cloud-python`` API Error Reporting `Documentation`_ to learn how to get started using this library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-error-reporting.svg - :target: https://pypi.python.org/pypi/google-cloud-error-reporting + :target: https://pypi.org/project/google-cloud-error-reporting/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-error-reporting.svg - :target: https://pypi.python.org/pypi/google-cloud-error-reporting + :target: https://pypi.org/project/google-cloud-error-reporting/ diff --git a/language/README.rst b/language/README.rst index 8685c8925313..9940503a7832 100644 --- a/language/README.rst +++ b/language/README.rst @@ -46,6 +46,6 @@ See the ``google-cloud-python`` API Natural Language `Documentation`_ to learn how to analyze text with this API. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-language.svg - :target: https://pypi.python.org/pypi/google-cloud-language + :target: https://pypi.org/project/google-cloud-language/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-language.svg - :target: https://pypi.python.org/pypi/google-cloud-language + :target: https://pypi.org/project/google-cloud-language/ diff --git a/logging/README.rst b/logging/README.rst index a706b50079ac..8cf274e4e4a1 100644 --- a/logging/README.rst +++ b/logging/README.rst @@ -57,6 +57,6 @@ connect to Stackdriver Logging using this Client Library. .. _logging documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/logging/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-logging.svg - :target: https://pypi.python.org/pypi/google-cloud-logging + :target: https://pypi.org/project/google-cloud-logging/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-logging.svg - :target: https://pypi.python.org/pypi/google-cloud-logging + :target: https://pypi.org/project/google-cloud-logging/ diff --git a/monitoring/README.rst b/monitoring/README.rst index 4debab64a3ea..f5a8bb8ecb77 100644 --- a/monitoring/README.rst +++ b/monitoring/README.rst @@ -67,6 +67,6 @@ to connect to Stackdriver Monitoring using this Client Library. .. _monitoring documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/monitoring/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-monitoring.svg - :target: https://pypi.python.org/pypi/google-cloud-monitoring + :target: https://pypi.org/project/google-cloud-monitoring/ .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-monitoring.svg - :target: https://pypi.python.org/pypi/google-cloud-monitoring + :target: https://pypi.org/project/google-cloud-monitoring/ diff --git a/pubsub/README.rst b/pubsub/README.rst index 472b74eb1bf0..bf116676a440 100644 --- a/pubsub/README.rst +++ b/pubsub/README.rst @@ -61,6 +61,6 @@ To get started with this API, you'll need to create attr1='value1', attr2='value2') .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg - :target: https://pypi.python.org/pypi/google-cloud-pubsub + :target: https://pypi.org/project/google-cloud-pubsub/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg - :target: https://pypi.python.org/pypi/google-cloud-pubsub + :target: https://pypi.org/project/google-cloud-pubsub/ diff --git a/resource_manager/README.rst b/resource_manager/README.rst index f0e67ca4750e..fe6864580ed5 100644 --- a/resource_manager/README.rst +++ b/resource_manager/README.rst @@ -45,6 +45,6 @@ how to manage projects using this Client Library. .. _Resource Manager documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/resource-manager/api.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-resource-manager.svg - :target: https://pypi.python.org/pypi/google-cloud-resource-manager + :target: https://pypi.org/project/google-cloud-resource-manager/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-resource-manager.svg - :target: https://pypi.python.org/pypi/google-cloud-resource-manager + :target: https://pypi.org/project/google-cloud-resource-manager/ diff --git a/runtimeconfig/README.rst b/runtimeconfig/README.rst index b8d79b3fd3d3..d061bea3eda2 100644 --- a/runtimeconfig/README.rst +++ b/runtimeconfig/README.rst @@ -48,6 +48,6 @@ See the ``google-cloud-python`` API runtimeconfig `Documentation`_ to learn how to interact with Cloud RuntimeConfig using this Client Library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-runtimeconfig.svg - :target: https://pypi.python.org/pypi/google-cloud-runtimeconfig + :target: https://pypi.org/project/google-cloud-runtimeconfig/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-runtimeconfig.svg - :target: https://pypi.python.org/pypi/google-cloud-runtimeconfig + :target: https://pypi.org/project/google-cloud-runtimeconfig/ diff --git a/speech/README.rst b/speech/README.rst index ce67559f09e7..150fc37bb590 100644 --- a/speech/README.rst +++ b/speech/README.rst @@ -43,6 +43,6 @@ connect to the Google Cloud Speech API using this Client Library. .. _speech documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/speech/usage.html .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-speech.svg - :target: https://pypi.python.org/pypi/google-cloud-speech + :target: https://pypi.org/project/google-cloud-speech/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-speech.svg - :target: https://pypi.python.org/pypi/google-cloud-speech + :target: https://pypi.org/project/google-cloud-speech/ diff --git a/storage/README.rst b/storage/README.rst index 553c377a2be3..d291fc389c23 100644 --- a/storage/README.rst +++ b/storage/README.rst @@ -64,6 +64,6 @@ how to create a bucket. blob2.upload_from_filename(filename='/local/path.txt') .. 
|pypi| image:: https://img.shields.io/pypi/v/google-cloud-storage.svg - :target: https://pypi.python.org/pypi/google-cloud-storage + :target: https://pypi.org/project/google-cloud-storage/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-storage.svg - :target: https://pypi.python.org/pypi/google-cloud-storage + :target: https://pypi.org/project/google-cloud-storage/ diff --git a/translate/README.rst b/translate/README.rst index 7e56d081cf46..18bc34002258 100644 --- a/translate/README.rst +++ b/translate/README.rst @@ -42,6 +42,6 @@ See the ``google-cloud-python`` API Translation `Documentation`_ to learn how to translate text using this library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-translate.svg - :target: https://pypi.python.org/pypi/google-cloud-translate + :target: https://pypi.org/project/google-cloud-translate/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-translate.svg - :target: https://pypi.python.org/pypi/google-cloud-translate + :target: https://pypi.org/project/google-cloud-translate/ diff --git a/vision/README.rst b/vision/README.rst index 0056d714dd5c..d54f36c3e8c8 100644 --- a/vision/README.rst +++ b/vision/README.rst @@ -50,6 +50,6 @@ See the ``google-cloud-python`` API `Documentation`_ to learn how to analyze images using this library. .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-vision.svg - :target: https://pypi.python.org/pypi/google-cloud-vision + :target: https://pypi.org/project/google-cloud-vision/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-vision.svg - :target: https://pypi.python.org/pypi/google-cloud-vision + :target: https://pypi.org/project/google-cloud-vision/ From a2f319f4c36cbce7522cc73232818f81aa5fd741 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 20 Jul 2017 10:07:12 -0700 Subject: [PATCH 32/62] Revert "RPC retries (second PR) (#3324)" (#3642) This reverts commit 67f4ba47069146a9b93005e38046eb2cd59b150a. --- bigtable/google/cloud/bigtable/retry.py | 169 ------------------- bigtable/google/cloud/bigtable/row_data.py | 3 - bigtable/google/cloud/bigtable/table.py | 101 +++++++---- bigtable/tests/retry_test_script.txt | 38 ----- bigtable/tests/system.py | 78 --------- bigtable/tests/unit/_testing.py | 27 +-- bigtable/tests/unit/test_table.py | 185 ++------------------- 7 files changed, 81 insertions(+), 520 deletions(-) delete mode 100644 bigtable/google/cloud/bigtable/retry.py delete mode 100644 bigtable/tests/retry_test_script.txt diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py deleted file mode 100644 index f20419ce4f8e..000000000000 --- a/bigtable/google/cloud/bigtable/retry.py +++ /dev/null @@ -1,169 +0,0 @@ -"""Provides function wrappers that implement retrying.""" -import random -import time -import six -import sys - -from google.cloud._helpers import _to_bytes -from google.cloud.bigtable._generated import ( - bigtable_pb2 as data_messages_v2_pb2) -from google.gax import config, errors -from grpc import RpcError - - -_MILLIS_PER_SECOND = 1000 - - -class ReadRowsIterator(object): - """Creates an iterator equivalent to a_iter, but that retries on certain - exceptions. 
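The docstring above captures the heart of the feature being reverted: wrap a gRPC read stream in an iterator that transparently re-opens the stream after a retryable failure, with randomized backoff between attempts. As a rough, self-contained sketch of that pattern (the ``open_stream`` and ``retryable`` callables here are illustrative stand-ins, not part of the real module):

    import random
    import time

    class RetryingIterator(object):
        """Sketch: iterate a stream, re-opening it on retryable errors.

        ``open_stream`` is a zero-argument callable returning a fresh
        iterator; ``retryable`` decides whether an exception may be
        retried.  Sleeps are drawn uniformly from [0, 2 * delay] so
        they average out to the nominal delay.
        """

        def __init__(self, open_stream, retryable, initial_delay=0.01,
                     delay_mult=1.3, max_delay=30.0, total_timeout=600.0):
            self._open_stream = open_stream
            self._retryable = retryable
            self._delay = initial_delay
            self._delay_mult = delay_mult
            self._max_delay = max_delay
            self._deadline = time.time() + total_timeout
            self._stream = open_stream()

        def __iter__(self):
            return self

        def __next__(self):
            while time.time() < self._deadline:
                try:
                    return next(self._stream)
                except StopIteration:
                    raise
                except Exception as exc:
                    if not self._retryable(exc):
                        raise
                    # Randomized backoff, then re-open the stream.
                    time.sleep(random.uniform(0, self._delay * 2))
                    self._delay = min(self._delay * self._delay_mult,
                                      self._max_delay)
                    self._stream = self._open_stream()
            raise RuntimeError('retry deadline exceeded')

        next = __next__  # Python 2 spelling, mirroring the module's next()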
- """ - - def __init__(self, client, name, start_key, end_key, filter_, limit, - retry_options, **kwargs): - self.client = client - self.retry_options = retry_options - self.name = name - self.start_key = start_key - self.start_key_closed = True - self.end_key = end_key - self.filter_ = filter_ - self.limit = limit - self.delay_mult = retry_options.backoff_settings.retry_delay_multiplier - self.max_delay_millis = \ - retry_options.backoff_settings.max_retry_delay_millis - self.timeout_mult = \ - retry_options.backoff_settings.rpc_timeout_multiplier - self.max_timeout = \ - (retry_options.backoff_settings.max_rpc_timeout_millis / - _MILLIS_PER_SECOND) - self.total_timeout = \ - (retry_options.backoff_settings.total_timeout_millis / - _MILLIS_PER_SECOND) - self.set_stream() - - def set_start_key(self, start_key): - """ - Sets the row key at which this iterator will begin reading. - """ - self.start_key = start_key - self.start_key_closed = False - - def set_stream(self): - """ - Resets the read stream by making an RPC on the 'ReadRows' endpoint. - """ - req_pb = _create_row_request(self.name, start_key=self.start_key, - start_key_closed=self.start_key_closed, - end_key=self.end_key, - filter_=self.filter_, limit=self.limit) - self.stream = self.client._data_stub.ReadRows(req_pb) - - def next(self, *args, **kwargs): - """ - Read and return the next row from the stream. - Retry on idempotent failure. - """ - delay = self.retry_options.backoff_settings.initial_retry_delay_millis - exc = errors.RetryError('Retry total timeout exceeded before any' - 'response was received') - timeout = (self.retry_options.backoff_settings - .initial_rpc_timeout_millis / - _MILLIS_PER_SECOND) - - now = time.time() - deadline = now + self.total_timeout - while deadline is None or now < deadline: - try: - return six.next(self.stream) - except StopIteration as stop: - raise stop - except RpcError as error: # pylint: disable=broad-except - code = config.exc_to_code(error) - if code not in self.retry_options.retry_codes: - six.reraise(type(error), error) - - # pylint: disable=redefined-variable-type - exc = errors.RetryError( - 'Retry total timeout exceeded with exception', error) - - # Sleep a random number which will, on average, equal the - # expected delay. - to_sleep = random.uniform(0, delay * 2) - time.sleep(to_sleep / _MILLIS_PER_SECOND) - delay = min(delay * self.delay_mult, self.max_delay_millis) - now = time.time() - timeout = min( - timeout * self.timeout_mult, self.max_timeout, - deadline - now) - self.set_stream() - - six.reraise(errors.RetryError, exc, sys.exc_info()[2]) - - def __next__(self, *args, **kwargs): - return self.next(*args, **kwargs) - - -def _create_row_request(table_name, row_key=None, start_key=None, - start_key_closed=True, end_key=None, filter_=None, - limit=None): - """Creates a request to read rows in a table. - - :type table_name: str - :param table_name: The name of the table to read from. - - :type row_key: bytes - :param row_key: (Optional) The key of a specific row to read from. - - :type start_key: bytes - :param start_key: (Optional) The beginning of a range of row keys to - read from. The range will include ``start_key``. If - left empty, will be interpreted as the empty string. - - :type end_key: bytes - :param end_key: (Optional) The end of a range of row keys to read from. - The range will not include ``end_key``. If left empty, - will be interpreted as an infinite string. 
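A detail worth noting in the parameters above: row ranges are half-open (inclusive start, exclusive end), and the retry path re-opens the scan with an exclusive start so rows already delivered are not re-read. A minimal sketch of that bookkeeping (``resume_range`` is a hypothetical helper for illustration, not library API):

    def resume_range(start_key, end_key, last_seen=None):
        """Sketch: choose (re)scan bounds for the range [start_key, end_key).

        The first attempt uses a closed (inclusive) start; once rows up
        to ``last_seen`` have been received, a retry uses an open
        (exclusive) start.  Appending a zero byte to ``last_seen`` and
        keeping the start closed would be equivalent.
        """
        if last_seen is None:
            return start_key, True, end_key    # start_key_closed=True
        return last_seen, False, end_key       # start_key_closed=False

    assert resume_range(b'r1', b'r9') == (b'r1', True, b'r9')
    assert resume_range(b'r1', b'r9', b'r4') == (b'r4', False, b'r9')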
- - :type filter_: :class:`.RowFilter` - :param filter_: (Optional) The filter to apply to the contents of the - specified row(s). If unset, reads the entire table. - - :type limit: int - :param limit: (Optional) The read will terminate after committing to N - rows' worth of results. The default (zero) is to return - all results. - - :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest` - :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs. - :raises: :class:`ValueError ` if both - ``row_key`` and one of ``start_key`` and ``end_key`` are set - """ - request_kwargs = {'table_name': table_name} - if (row_key is not None and - (start_key is not None or end_key is not None)): - raise ValueError('Row key and row range cannot be ' - 'set simultaneously') - range_kwargs = {} - if start_key is not None or end_key is not None: - if start_key is not None: - if start_key_closed: - range_kwargs['start_key_closed'] = _to_bytes(start_key) - else: - range_kwargs['start_key_open'] = _to_bytes(start_key) - if end_key is not None: - range_kwargs['end_key_open'] = _to_bytes(end_key) - if filter_ is not None: - request_kwargs['filter'] = filter_.to_pb() - if limit is not None: - request_kwargs['rows_limit'] = limit - - message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs) - - if row_key is not None: - message.rows.row_keys.append(_to_bytes(row_key)) - - if range_kwargs: - message.rows.row_ranges.add(**range_kwargs) - - return message diff --git a/bigtable/google/cloud/bigtable/row_data.py b/bigtable/google/cloud/bigtable/row_data.py index 0849e681b7e6..78179db25c4e 100644 --- a/bigtable/google/cloud/bigtable/row_data.py +++ b/bigtable/google/cloud/bigtable/row_data.py @@ -274,9 +274,6 @@ def consume_next(self): self._validate_chunk(chunk) - if hasattr(self._response_iterator, 'set_start_key'): - self._response_iterator.set_start_key(chunk.row_key) - if chunk.reset_row: row = self._row = None cell = self._cell = self._previous_cell = None diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index ad6fab88dcf9..40ef3a2ca2fb 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -17,6 +17,7 @@ import six +from google.cloud._helpers import _to_bytes from google.cloud.bigtable._generated import ( bigtable_pb2 as data_messages_v2_pb2) from google.cloud.bigtable._generated import ( @@ -29,26 +30,6 @@ from google.cloud.bigtable.row import ConditionalRow from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.row_data import PartialRowsData -from google.gax import RetryOptions, BackoffSettings -from google.cloud.bigtable.retry import ReadRowsIterator, _create_row_request -from grpc import StatusCode - -BACKOFF_SETTINGS = BackoffSettings( - initial_retry_delay_millis=10, - retry_delay_multiplier=1.3, - max_retry_delay_millis=30000, - initial_rpc_timeout_millis=25 * 60 * 1000, - rpc_timeout_multiplier=1.0, - max_rpc_timeout_millis=25 * 60 * 1000, - total_timeout_millis=30 * 60 * 1000 -) - -RETRY_CODES = [ - StatusCode.DEADLINE_EXCEEDED, - StatusCode.ABORTED, - StatusCode.INTERNAL, - StatusCode.UNAVAILABLE -] # Maximum number of mutations in bulk (MutateRowsRequest message): @@ -276,7 +257,7 @@ def read_row(self, row_key, filter_=None): return rows_data.rows[row_key] def read_rows(self, start_key=None, end_key=None, limit=None, - filter_=None, backoff_settings=None): + filter_=None): """Read rows from this table. 
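For orientation, a typical way to consume what ``read_rows`` returns is sketched below; the project, instance, table, and key values are placeholders, assuming an already-provisioned table:

    from google.cloud import bigtable

    client = bigtable.Client(project='my-project', admin=True)  # placeholder
    table = client.instance('my-instance').table('my-table')    # placeholders

    rows = table.read_rows(start_key=b'r1', end_key=b'r9')
    rows.consume_all()                 # drain the stream into rows.rows
    for row_key in sorted(rows.rows):
        row = rows.rows[row_key]       # PartialRowData for this key
        print(row_key, row.cells)      # family -> qualifier -> [Cell, ...]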
:type start_key: bytes @@ -303,18 +284,13 @@ def read_rows(self, start_key=None, end_key=None, limit=None, :returns: A :class:`.PartialRowsData` convenience wrapper for consuming the streamed results. """ + request_pb = _create_row_request( + self.name, start_key=start_key, end_key=end_key, filter_=filter_, + limit=limit) client = self._instance._client - if backoff_settings is None: - backoff_settings = BACKOFF_SETTINGS - RETRY_OPTIONS = RetryOptions( - retry_codes=RETRY_CODES, - backoff_settings=backoff_settings - ) - - retrying_iterator = ReadRowsIterator(client, self.name, start_key, - end_key, filter_, limit, - RETRY_OPTIONS) - return PartialRowsData(retrying_iterator) + response_iterator = client._data_stub.ReadRows(request_pb) + # We expect an iterator of `data_messages_v2_pb2.ReadRowsResponse` + return PartialRowsData(response_iterator) def mutate_rows(self, rows): """Mutates multiple rows in bulk. @@ -383,6 +359,67 @@ def sample_row_keys(self): return response_iterator +def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, + filter_=None, limit=None): + """Creates a request to read rows in a table. + + :type table_name: str + :param table_name: The name of the table to read from. + + :type row_key: bytes + :param row_key: (Optional) The key of a specific row to read from. + + :type start_key: bytes + :param start_key: (Optional) The beginning of a range of row keys to + read from. The range will include ``start_key``. If + left empty, will be interpreted as the empty string. + + :type end_key: bytes + :param end_key: (Optional) The end of a range of row keys to read from. + The range will not include ``end_key``. If left empty, + will be interpreted as an infinite string. + + :type filter_: :class:`.RowFilter` + :param filter_: (Optional) The filter to apply to the contents of the + specified row(s). If unset, reads the entire table. + + :type limit: int + :param limit: (Optional) The read will terminate after committing to N + rows' worth of results. The default (zero) is to return + all results. + + :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest` + :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs. + :raises: :class:`ValueError ` if both + ``row_key`` and one of ``start_key`` and ``end_key`` are set + """ + request_kwargs = {'table_name': table_name} + if (row_key is not None and + (start_key is not None or end_key is not None)): + raise ValueError('Row key and row range cannot be ' + 'set simultaneously') + range_kwargs = {} + if start_key is not None or end_key is not None: + if start_key is not None: + range_kwargs['start_key_closed'] = _to_bytes(start_key) + if end_key is not None: + range_kwargs['end_key_open'] = _to_bytes(end_key) + if filter_ is not None: + request_kwargs['filter'] = filter_.to_pb() + if limit is not None: + request_kwargs['rows_limit'] = limit + + message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs) + + if row_key is not None: + message.rows.row_keys.append(_to_bytes(row_key)) + + if range_kwargs: + message.rows.row_ranges.add(**range_kwargs) + + return message + + def _mutate_rows_request(table_name, rows): """Creates a request to mutate rows in a table. diff --git a/bigtable/tests/retry_test_script.txt b/bigtable/tests/retry_test_script.txt deleted file mode 100644 index 863662e897ba..000000000000 --- a/bigtable/tests/retry_test_script.txt +++ /dev/null @@ -1,38 +0,0 @@ -# This retry script is processed by the retry server and the client under test. 
-# Client tests should parse any command beginning with "CLIENT:", send the corresponding RPC -# to the retry server and expect a valid response. -# "EXPECT" commands indicate the call the server is expecting the client to send. -# -# The retry server has one table named "table" that should be used for testing. -# There are three types of commands supported: -# READ -# Expect the corresponding rows to be returned with arbitrary values. -# SCAN ... -# Ranges are expressed as an interval with either open or closed start and end, -# such as [1,3) for "1,2" or (1, 3] for "2,3". -# WRITE -# All writes should succeed eventually. Value payload is ignored. -# The server writes PASS or FAIL on a line by itself to STDOUT depending on the result of the test. -# All other server output should be ignored. - -# Echo same scan back after immediate error -CLIENT: SCAN [r1,r3) r1,r2 -EXPECT: SCAN [r1,r3) -SERVER: ERROR Unavailable -EXPECT: SCAN [r1,r3) -SERVER: READ_RESPONSE r1,r2 - -# Retry scans with open interval starting at the least read row key. -# Instead of using open intervals for retry ranges, '\x00' can be -# appended to the last received row key and sent in a closed interval. -CLIENT: SCAN [r1,r9) r1,r2,r3,r4,r5,r6,r7,r8 -EXPECT: SCAN [r1,r9) -SERVER: READ_RESPONSE r1,r2,r3,r4 -SERVER: ERROR Unavailable -EXPECT: SCAN (r4,r9) -SERVER: ERROR Unavailable -EXPECT: SCAN (r4,r9) -SERVER: READ_RESPONSE r5,r6,r7 -SERVER: ERROR Unavailable -EXPECT: SCAN (r7,r9) -SERVER: READ_RESPONSE r8 diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 5a5b4324cbbe..1fcda808db39 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -295,84 +295,6 @@ def test_delete_column_family(self): # Make sure we have successfully deleted it. self.assertEqual(temp_table.list_column_families(), {}) - def test_retry(self): - import subprocess, os, stat, platform - from google.cloud.bigtable.client import Client - from google.cloud.bigtable.instance import Instance - from google.cloud.bigtable.table import Table - - # import for urlopen based on version - try: - # python 3 - from urllib.request import urlopen - except ImportError: - # python 2 - from urllib2 import urlopen - - - TEST_SCRIPT = 'tests/retry_test_script.txt' - SERVER_NAME = 'retry_server' - SERVER_ZIP = SERVER_NAME + ".tar.gz" - - def process_scan(table, range, ids): - range_chunks = range.split(",") - range_open = range_chunks[0].lstrip("[") - range_close = range_chunks[1].rstrip(")") - rows = table.read_rows(range_open, range_close) - rows.consume_all() - - # Download server - MOCK_SERVER_URLS = { - 'Linux': 'https://storage.googleapis.com/cloud-bigtable-test/retries/retry_server_linux.tar.gz', - 'Darwin': 'https://storage.googleapis.com/cloud-bigtable-test/retries/retry_server_mac.tar.gz', - } - - test_platform = platform.system() - if test_platform not in MOCK_SERVER_URLS: - self.skip('Retry server not available for platform {0}.'.format(test_platform)) - - mock_server_download = urlopen(MOCK_SERVER_URLS[test_platform]).read() - mock_server_file = open(SERVER_ZIP, 'wb') - mock_server_file.write(mock_server_download) - - # Unzip server - subprocess.call(['tar', 'zxvf', SERVER_ZIP, '-C', '.']) - - # Connect to server - server = subprocess.Popen( - ['./' + SERVER_NAME, '--script=' + TEST_SCRIPT], - stdin=subprocess.PIPE, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - - (endpoint, port) = server.stdout.readline().rstrip("\n").split(":") - os.environ["BIGTABLE_EMULATOR_HOST"] = endpoint + ":" + port - client = 
Client(project="client", admin=True) - instance = Instance("instance", client) - table = instance.table("table") - - # Run test, line by line - with open(TEST_SCRIPT, 'r') as script: - for line in script.readlines(): - if line.startswith("CLIENT:"): - chunks = line.split(" ") - op = chunks[1] - process_scan(table, chunks[2], chunks[3]) - - # Check that the test passed - server.kill() - server_stdout_lines = [] - while True: - line = server.stdout.readline() - if line != '': - server_stdout_lines.append(line) - else: - break - self.assertEqual(server_stdout_lines[-1], "PASS\n") - - # Clean up - os.remove(SERVER_ZIP) - os.remove(SERVER_NAME) class TestDataAPI(unittest.TestCase): diff --git a/bigtable/tests/unit/_testing.py b/bigtable/tests/unit/_testing.py index 7587c66c133b..e67af6a1498c 100644 --- a/bigtable/tests/unit/_testing.py +++ b/bigtable/tests/unit/_testing.py @@ -14,6 +14,7 @@ """Mocks used to emulate gRPC generated objects.""" + class _FakeStub(object): """Acts as a gPRC stub.""" @@ -26,16 +27,6 @@ def __getattr__(self, name): # since __getattribute__ will handle them. return _MethodMock(name, self) -class _CustomFakeStub(object): - """Acts as a gRPC stub. Generates a result using an injected callable.""" - def __init__(self, result_callable): - self.result_callable = result_callable - self.method_calls = [] - - def __getattr__(self, name): - # We need not worry about attributes set in constructor - # since __getattribute__ will handle them. - return _CustomMethodMock(name, self) class _MethodMock(object): """Mock for API method attached to a gRPC stub. @@ -51,19 +42,5 @@ def __call__(self, *args, **kwargs): """Sync method meant to mock a gRPC stub request.""" self._stub.method_calls.append((self._name, args, kwargs)) curr_result, self._stub.results = (self._stub.results[0], - self._stub.results[1:]) + self._stub.results[1:]) return curr_result - -class _CustomMethodMock(object): - """ - Same as _MethodMock, but backed by an injected callable. - """ - - def __init__(self, name, stub): - self._name = name - self._stub = stub - - def __call__(self, *args, **kwargs): - """Sync method meant to mock a gRPC stub request.""" - self._stub.method_calls.append((self._name, args, kwargs)) - return self._stub.result_callable() diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index c59667d6a821..dc4d2b5bbad0 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -493,8 +493,7 @@ def test_read_rows(self): from google.cloud._testing import _Monkey from tests.unit._testing import _FakeStub from google.cloud.bigtable.row_data import PartialRowsData - from google.cloud.bigtable import retry as MUT - from google.cloud.bigtable.retry import ReadRowsIterator + from google.cloud.bigtable import table as MUT client = _Client() instance = _Instance(self.INSTANCE_NAME, client=client) @@ -514,18 +513,20 @@ def mock_create_row_request(table_name, **kwargs): # Patch the stub used by the API method. client._data_stub = stub = _FakeStub(response_iterator) + # Create expected_result. + expected_result = PartialRowsData(response_iterator) + + # Perform the method and check the result. start_key = b'start-key' end_key = b'end-key' filter_obj = object() limit = 22 with _Monkey(MUT, _create_row_request=mock_create_row_request): - # Perform the method and check the result. 
result = table.read_rows( start_key=start_key, end_key=end_key, filter_=filter_obj, limit=limit) - self.assertIsInstance(result._response_iterator, ReadRowsIterator) - self.assertEqual(result._response_iterator.client, client) + self.assertEqual(result, expected_result) self.assertEqual(stub.method_calls, [( 'ReadRows', (request_pb,), @@ -536,166 +537,9 @@ def mock_create_row_request(table_name, **kwargs): 'end_key': end_key, 'filter_': filter_obj, 'limit': limit, - 'start_key_closed': True, } self.assertEqual(mock_created, [(table.name, created_kwargs)]) - def test_read_rows_one_chunk(self): - from google.cloud._testing import _Monkey - from tests.unit._testing import _FakeStub - from google.cloud.bigtable import retry as MUT - from google.cloud.bigtable.retry import ReadRowsIterator - from google.cloud.bigtable.row_data import Cell - from google.cloud.bigtable.row_data import PartialRowsData - - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - table = self._make_one(self.TABLE_ID, instance) - - # Create request_pb - request_pb = object() # Returned by our mock. - mock_created = [] - - def mock_create_row_request(table_name, **kwargs): - mock_created.append((table_name, kwargs)) - return request_pb - - # Create response_iterator - chunk = _ReadRowsResponseCellChunkPB( - row_key=self.ROW_KEY, - family_name=self.FAMILY_NAME, - qualifier=self.QUALIFIER, - timestamp_micros=self.TIMESTAMP_MICROS, - value=self.VALUE, - commit_row=True, - ) - response_pb = _ReadRowsResponsePB(chunks=[chunk]) - response_iterator = iter([response_pb]) - - # Patch the stub used by the API method. - client._data_stub = stub = _FakeStub(response_iterator) - - start_key = b'start-key' - end_key = b'end-key' - filter_obj = object() - limit = 22 - with _Monkey(MUT, _create_row_request=mock_create_row_request): - # Perform the method and check the result. - result = table.read_rows( - start_key=start_key, end_key=end_key, filter_=filter_obj, - limit=limit) - result.consume_all() - - def test_read_rows_retry_timeout(self): - from google.cloud._testing import _Monkey - from tests.unit._testing import _CustomFakeStub - from google.cloud.bigtable.row_data import PartialRowsData - from google.cloud.bigtable import retry as MUT - from google.cloud.bigtable.retry import ReadRowsIterator - from google.gax import BackoffSettings - from google.gax.errors import RetryError - from grpc import StatusCode, RpcError - import time - - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - table = self._make_one(self.TABLE_ID, instance) - - # Create request_pb - request_pb = object() # Returned by our mock. - mock_created = [] - - def mock_create_row_request(table_name, **kwargs): - mock_created.append((table_name, kwargs)) - return request_pb - - # Create a slow response iterator to cause a timeout - class MockTimeoutError(RpcError): - def code(self): - return StatusCode.DEADLINE_EXCEEDED - - def _wait_then_raise(): - time.sleep(0.1) - raise MockTimeoutError() - - # Patch the stub used by the API method. The stub should create a new - # slow_iterator every time its queried. 
- def make_slow_iterator(): - return (_wait_then_raise() for i in range(10)) - client._data_stub = stub = _CustomFakeStub(make_slow_iterator) - - # Set to timeout before RPC completes - test_backoff_settings = BackoffSettings( - initial_retry_delay_millis=10, - retry_delay_multiplier=0.3, - max_retry_delay_millis=30000, - initial_rpc_timeout_millis=1000, - rpc_timeout_multiplier=1.0, - max_rpc_timeout_millis=25 * 60 * 1000, - total_timeout_millis=1000 - ) - - start_key = b'start-key' - end_key = b'end-key' - filter_obj = object() - limit = 22 - with _Monkey(MUT, _create_row_request=mock_create_row_request): - # Verify that a RetryError is thrown on read. - result = table.read_rows( - start_key=start_key, end_key=end_key, filter_=filter_obj, - limit=limit, backoff_settings=test_backoff_settings) - with self.assertRaises(RetryError): - result.consume_next() - - def test_read_rows_non_idempotent_error_throws(self): - from google.cloud._testing import _Monkey - from tests.unit._testing import _CustomFakeStub - from google.cloud.bigtable.row_data import PartialRowsData - from google.cloud.bigtable import retry as MUT - from google.cloud.bigtable.retry import ReadRowsIterator - from google.gax import BackoffSettings - from google.gax.errors import RetryError - from grpc import StatusCode, RpcError - import time - - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - table = self._make_one(self.TABLE_ID, instance) - - # Create request_pb - request_pb = object() # Returned by our mock. - mock_created = [] - - def mock_create_row_request(table_name, **kwargs): - mock_created.append((table_name, kwargs)) - return request_pb - - # Create response iterator that raises a non-idempotent exception - class MockNonIdempotentError(RpcError): - def code(self): - return StatusCode.RESOURCE_EXHAUSTED - - def _raise(): - raise MockNonIdempotentError() - - # Patch the stub used by the API method. The stub should create a new - # slow_iterator every time its queried. - def make_raising_iterator(): - return (_raise() for i in range(10)) - client._data_stub = stub = _CustomFakeStub(make_raising_iterator) - - start_key = b'start-key' - end_key = b'end-key' - filter_obj = object() - limit = 22 - with _Monkey(MUT, _create_row_request=mock_create_row_request): - # Verify that a RetryError is thrown on read. 
- result = table.read_rows( - start_key=start_key, end_key=end_key, filter_=filter_obj, - limit=limit) - with self.assertRaises(MockNonIdempotentError): - result.consume_next() - def test_sample_row_keys(self): from tests.unit._testing import _FakeStub @@ -728,12 +572,12 @@ def test_sample_row_keys(self): class Test__create_row_request(unittest.TestCase): def _call_fut(self, table_name, row_key=None, start_key=None, end_key=None, - start_key_closed=True, filter_=None, limit=None): - from google.cloud.bigtable.retry import _create_row_request + filter_=None, limit=None): + from google.cloud.bigtable.table import _create_row_request return _create_row_request( table_name, row_key=row_key, start_key=start_key, end_key=end_key, - start_key_closed=start_key_closed, filter_=filter_, limit=limit) + filter_=filter_, limit=limit) def test_table_name_only(self): table_name = 'table_name' @@ -756,7 +600,7 @@ def test_row_key(self): expected_result.rows.row_keys.append(row_key) self.assertEqual(result, expected_result) - def test_row_range_start_key_closed(self): + def test_row_range_start_key(self): table_name = 'table_name' start_key = b'start_key' result = self._call_fut(table_name, start_key=start_key) @@ -764,15 +608,6 @@ def test_row_range_start_key_closed(self): expected_result.rows.row_ranges.add(start_key_closed=start_key) self.assertEqual(result, expected_result) - def test_row_range_start_key_open(self): - table_name = 'table_name' - start_key = b'start_key' - result = self._call_fut(table_name, start_key=start_key, - start_key_closed=False) - expected_result = _ReadRowsRequestPB(table_name=table_name) - expected_result.rows.row_ranges.add(start_key_open=start_key) - self.assertEqual(result, expected_result) - def test_row_range_end_key(self): table_name = 'table_name' end_key = b'end_key' From 698bc8b9c0f542b73b1a2b24efef5ebb8961a2d3 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 20 Jul 2017 10:18:50 -0700 Subject: [PATCH 33/62] Remove references to google.cloud.operation from spanner (#3625) * Remove references to google.cloud.operation from spanner * Remove unused import --- spanner/google/cloud/spanner/database.py | 11 +---------- spanner/google/cloud/spanner/instance.py | 11 ++--------- spanner/tests/unit/test_database.py | 4 ---- spanner/tests/unit/test_instance.py | 4 ---- 4 files changed, 3 insertions(+), 27 deletions(-) diff --git a/spanner/google/cloud/spanner/database.py b/spanner/google/cloud/spanner/database.py index 12af9ca20edb..a449f304bf79 100644 --- a/spanner/google/cloud/spanner/database.py +++ b/spanner/google/cloud/spanner/database.py @@ -18,8 +18,6 @@ from google.gax.errors import GaxError from google.gax.grpc import exc_to_code -from google.cloud.proto.spanner.admin.database.v1 import ( - spanner_database_admin_pb2 as admin_v1_pb2) from google.cloud.gapic.spanner.v1.spanner_client import SpannerClient from grpc import StatusCode import six @@ -27,7 +25,6 @@ # pylint: disable=ungrouped-imports from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound -from google.cloud.operation import register_type from google.cloud.spanner import __version__ from google.cloud.spanner._helpers import _options_with_prefix from google.cloud.spanner.batch import Batch @@ -44,10 +41,6 @@ r'databases/(?P[a-z][a-z0-9_\-]*[a-z0-9])$' ) -register_type(admin_v1_pb2.Database) -register_type(admin_v1_pb2.CreateDatabaseMetadata) -register_type(admin_v1_pb2.UpdateDatabaseDdlMetadata) - class Database(object): """Representation of a Cloud Spanner 
Database. @@ -205,7 +198,6 @@ def create(self): )) raise - future.caller_metadata = {'request_type': 'CreateDatabase'} return future def exists(self): @@ -252,7 +244,7 @@ def update_ddl(self, ddl_statements): See https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.DatabaseAdmin.UpdateDatabase - :rtype: :class:`google.cloud.operation.Operation` + :rtype: :class:`google.cloud.future.operation.Operation` :returns: an operation instance """ client = self._instance._client @@ -267,7 +259,6 @@ def update_ddl(self, ddl_statements): raise NotFound(self.name) raise - future.caller_metadata = {'request_type': 'UpdateDatabaseDdl'} return future def drop(self): diff --git a/spanner/google/cloud/spanner/instance.py b/spanner/google/cloud/spanner/instance.py index 711b8c489853..e67a0c31be6c 100644 --- a/spanner/google/cloud/spanner/instance.py +++ b/spanner/google/cloud/spanner/instance.py @@ -28,7 +28,6 @@ from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator -from google.cloud.operation import register_type from google.cloud.spanner._helpers import _options_with_prefix from google.cloud.spanner.database import Database from google.cloud.spanner.pool import BurstyPool @@ -41,10 +40,6 @@ DEFAULT_NODE_COUNT = 1 -register_type(admin_v1_pb2.Instance) -register_type(admin_v1_pb2.CreateInstanceMetadata) -register_type(admin_v1_pb2.UpdateInstanceMetadata) - class Instance(object): """Representation of a Cloud Spanner Instance. @@ -204,7 +199,7 @@ def create(self): before calling :meth:`create`. - :rtype: :class:`google.cloud.operation.Operation` + :rtype: :class:`google.cloud.future.operation.Operation` :returns: an operation instance """ api = self._client.instance_admin_api @@ -228,7 +223,6 @@ def create(self): raise Conflict(self.name) raise - future.caller_metadata = {'request_type': 'CreateInstance'} return future def exists(self): @@ -285,7 +279,7 @@ def update(self): before calling :meth:`update`. 
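For callers, the practical effect of this change is that ``create()``, ``update()``, and ``update_ddl()`` now hand back a standard future: wait on it with ``result()`` and read decoded metadata from ``.metadata``, instead of inspecting the removed ``caller_metadata``. A hedged usage sketch, with placeholder instance and database IDs:

    from google.cloud import spanner

    client = spanner.Client()
    instance = client.instance('my-instance')      # placeholder ID
    database = instance.database('my-database')    # placeholder ID

    op = database.update_ddl(
        ['CREATE TABLE users ( id INT64 NOT NULL ) PRIMARY KEY (id)'])
    op.result()         # block until the schema change completes, or raise
    print(op.metadata)  # decoded UpdateDatabaseDdlMetadata, once populated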
- :rtype: :class:`google.cloud.operation.Operation` + :rtype: :class:`google.cloud.future.operation.Operation` :returns: an operation instance """ api = self._client.instance_admin_api @@ -309,7 +303,6 @@ def update(self): raise NotFound(self.name) raise - future.caller_metadata = {'request_type': 'UpdateInstance'} return future def delete(self): diff --git a/spanner/tests/unit/test_database.py b/spanner/tests/unit/test_database.py index 5200a0ab7d1b..6216d8a348fd 100644 --- a/spanner/tests/unit/test_database.py +++ b/spanner/tests/unit/test_database.py @@ -312,8 +312,6 @@ def test_create_success(self): future = database.create() self.assertIs(future, op_future) - self.assertEqual(future.caller_metadata, - {'request_type': 'CreateDatabase'}) (parent, create_statement, extra_statements, options) = api._created_database @@ -493,8 +491,6 @@ def test_update_ddl(self): future = database.update_ddl(DDL_STATEMENTS) self.assertIs(future, op_future) - self.assertEqual(future.caller_metadata, - {'request_type': 'UpdateDatabaseDdl'}) name, statements, op_id, options = api._updated_database_ddl self.assertEqual(name, self.DATABASE_NAME) diff --git a/spanner/tests/unit/test_instance.py b/spanner/tests/unit/test_instance.py index d86c611b3ccb..ca8edacf3b81 100644 --- a/spanner/tests/unit/test_instance.py +++ b/spanner/tests/unit/test_instance.py @@ -241,8 +241,6 @@ def test_create_success(self): future = instance.create() self.assertIs(future, op_future) - self.assertEqual(future.caller_metadata, - {'request_type': 'CreateInstance'}) (parent, instance_id, instance, options) = api._created_instance self.assertEqual(parent, self.PARENT) @@ -424,8 +422,6 @@ def test_update_success(self): future = instance.update() self.assertIs(future, op_future) - self.assertEqual(future.caller_metadata, - {'request_type': 'UpdateInstance'}) instance, field_mask, options = api._updated_instance self.assertEqual(field_mask.paths, From 71fe7f5ea49cece5485d8e4f5679f2f03d65fea5 Mon Sep 17 00:00:00 2001 From: Dan O'Meara Date: Thu, 20 Jul 2017 10:24:06 -0700 Subject: [PATCH 34/62] Add Video Intelligence to README (#3628) * Add Video Intelligence to README Add Video Intelligence to list of APIs supported at Beta. Link to PyPI and package README. * Adding "Google" to "Cloud Video Intelligence" link. --- README.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 5e3615da46f9..9b3d9f0db64e 100644 --- a/README.rst +++ b/README.rst @@ -34,6 +34,7 @@ The following client libraries have **beta** support: - `Google Cloud Vision`_ (`Vision README`_) - `Google Cloud Natural Language`_ (`Natural Language README`_) - `Google Cloud Translation`_ (`Translation README`_) +- `Google Cloud Video Intelligence`_ (`Video Intelligence README`_) **Beta** indicates that the client library for a particular service is mostly stable and is being prepared for release. Issues and requests @@ -89,8 +90,10 @@ updates. See `versioning`_ for more details. .. _HappyBase README: https://github.com/GoogleCloudPlatform/google-cloud-python-happybase .. _Google Cloud Runtime Configuration: https://cloud.google.com/deployment-manager/runtime-configurator/ .. _Runtime Config README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/runtimeconfig -.. _Cloud Spanner: https://cloud.google.com/spanner/ +.. _Cloud Spanner: https://pypi.python.org/pypi/google-cloud-spanner .. _Cloud Spanner README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/spanner ..
_Google Cloud Video Intelligence: https://pypi.python.org/pypi/google-cloud-videointelligence +.. _Video Intelligence README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/videointelligence .. _versioning: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/CONTRIBUTING.rst#versioning If you need support for other Google APIs, check out the From ab420df29681ceb8aec50bd812ef478ada1fdffc Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Jul 2017 10:24:22 -0700 Subject: [PATCH 35/62] Adding badge images to Video Intelligence README. (#3639) --- videointelligence/README.rst | 5 +++++ videointelligence/nox.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/videointelligence/README.rst b/videointelligence/README.rst index e294919b77be..fff70efacf7b 100644 --- a/videointelligence/README.rst +++ b/videointelligence/README.rst @@ -35,3 +35,8 @@ learning models in an easy to use REST API. .. _Video Intelligence: https://cloud.google.com/videointelligence/ .. _Video Intelligence API docs: https://cloud.google.com/videointelligence/reference/rest/ + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-videointelligence.svg + :target: https://pypi.org/project/google-cloud-videointelligence/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-videointelligence.svg + :target: https://pypi.org/project/google-cloud-videointelligence/ diff --git a/videointelligence/nox.py b/videointelligence/nox.py index ceba6ff514e5..a76156e43a41 100644 --- a/videointelligence/nox.py +++ b/videointelligence/nox.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 3a17d0a9fdb9caa7741b63371c11954c72576e94 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Jul 2017 10:40:24 -0700 Subject: [PATCH 36/62] Adding Bigtable Cluster location on create() request. 
(#3646) --- bigtable/google/cloud/bigtable/cluster.py | 1 + bigtable/tests/unit/test_cluster.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/bigtable/google/cloud/bigtable/cluster.py b/bigtable/google/cloud/bigtable/cluster.py index 80b9068958db..8f7321b2a548 100644 --- a/bigtable/google/cloud/bigtable/cluster.py +++ b/bigtable/google/cloud/bigtable/cluster.py @@ -49,6 +49,7 @@ def _prepare_create_request(cluster): parent=cluster._instance.name, cluster_id=cluster.cluster_id, cluster=data_v2_pb2.Cluster( + location=cluster.location, serve_nodes=cluster.serve_nodes, ), ) diff --git a/bigtable/tests/unit/test_cluster.py b/bigtable/tests/unit/test_cluster.py index 3cc40964ba49..3eb18f43863d 100644 --- a/bigtable/tests/unit/test_cluster.py +++ b/bigtable/tests/unit/test_cluster.py @@ -393,12 +393,14 @@ def test_it(self): instance = _Instance(INSTANCE_ID, client) cluster = Cluster(CLUSTER_ID, instance, serve_nodes=SERVE_NODES) + cluster.location = u'projects/prahj-ekt/locations/zona-tres' request_pb = self._call_fut(cluster) self.assertEqual(request_pb.cluster_id, CLUSTER_ID) self.assertEqual(request_pb.parent, instance.name) self.assertEqual(request_pb.cluster.serve_nodes, SERVE_NODES) + self.assertEqual(request_pb.cluster.location, cluster.location) def _ClusterPB(*args, **kw): From c839a307ad2ffd7207d0d2e5dd0533b585c20a6e Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 20 Jul 2017 11:00:26 -0700 Subject: [PATCH 37/62] Update bigtable to use future.operation (#3623) --- bigtable/google/cloud/bigtable/cluster.py | 37 +++++++++++----------- bigtable/google/cloud/bigtable/instance.py | 17 ++++------ bigtable/tests/system.py | 27 ++-------------- bigtable/tests/unit/test_cluster.py | 23 ++++++-------- bigtable/tests/unit/test_instance.py | 21 ++++++------ 5 files changed, 46 insertions(+), 79 deletions(-) diff --git a/bigtable/google/cloud/bigtable/cluster.py b/bigtable/google/cloud/bigtable/cluster.py index 8f7321b2a548..8d15547efae3 100644 --- a/bigtable/google/cloud/bigtable/cluster.py +++ b/bigtable/google/cloud/bigtable/cluster.py @@ -21,9 +21,7 @@ instance_pb2 as data_v2_pb2) from google.cloud.bigtable._generated import ( bigtable_instance_admin_pb2 as messages_v2_pb2) -from google.cloud.operation import Operation -from google.cloud.operation import register_type - +from google.cloud.future import operation _CLUSTER_NAME_RE = re.compile(r'^projects/(?P[^/]+)/' r'instances/(?P[^/]+)/clusters/' @@ -33,9 +31,6 @@ """Default number of nodes to use when creating a cluster.""" -register_type(messages_v2_pb2.UpdateClusterMetadata) - - def _prepare_create_request(cluster): """Creates a protobuf request for a CreateCluster request. @@ -208,15 +203,18 @@ def create(self): :returns: The long-running operation corresponding to the create operation. """ - request_pb = _prepare_create_request(self) - # We expect a `google.longrunning.operations_pb2.Operation`. client = self._instance._client + + # We expect a `google.longrunning.operations_pb2.Operation`. + request_pb = _prepare_create_request(self) operation_pb = client._instance_stub.CreateCluster(request_pb) - operation = Operation.from_pb(operation_pb, client) - operation.target = self - operation.caller_metadata['request_type'] = 'CreateCluster' - return operation + operation_future = operation.from_grpc( + operation_pb, + client._operations_stub, + data_v2_pb2.Cluster, + metadata_type=messages_v2_pb2.UpdateClusterMetadata) + return operation_future def update(self): """Update this cluster. 
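As with the spanner change above, callers now block on the returned future rather than polling by hand. A hedged sketch of the calling side, assuming an authenticated bigtable client and using placeholder IDs and node count:

    from google.cloud import bigtable

    client = bigtable.Client(project='my-project', admin=True)   # placeholder
    instance = client.instance('my-instance', 'us-central1-c')   # placeholders
    cluster = instance.cluster('my-cluster', serve_nodes=3)      # placeholders

    op = cluster.create()    # returns a google.cloud.future operation
    op.result(timeout=60)    # poll until done; raises on error or timeout
    print(op.metadata)       # UpdateClusterMetadata, when the server sets it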
@@ -236,18 +234,21 @@ def update(self): :returns: The long-running operation corresponding to the update operation. """ + client = self._instance._client + + # We expect a `google.longrunning.operations_pb2.Operation`. request_pb = data_v2_pb2.Cluster( name=self.name, serve_nodes=self.serve_nodes, ) - # We expect a `google.longrunning.operations_pb2.Operation`. - client = self._instance._client operation_pb = client._instance_stub.UpdateCluster(request_pb) - operation = Operation.from_pb(operation_pb, client) - operation.target = self - operation.caller_metadata['request_type'] = 'UpdateCluster' - return operation + operation_future = operation.from_grpc( + operation_pb, + client._operations_stub, + data_v2_pb2.Cluster, + metadata_type=messages_v2_pb2.UpdateClusterMetadata) + return operation_future def delete(self): """Delete this cluster. diff --git a/bigtable/google/cloud/bigtable/instance.py b/bigtable/google/cloud/bigtable/instance.py index 1de3cbcea814..958f16602953 100644 --- a/bigtable/google/cloud/bigtable/instance.py +++ b/bigtable/google/cloud/bigtable/instance.py @@ -26,8 +26,7 @@ from google.cloud.bigtable.cluster import Cluster from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES from google.cloud.bigtable.table import Table -from google.cloud.operation import Operation -from google.cloud.operation import register_type +from google.cloud.future import operation _EXISTING_INSTANCE_LOCATION_ID = 'see-existing-cluster' @@ -35,10 +34,6 @@ r'instances/(?P[a-z][-a-z0-9]*)$') -register_type(messages_v2_pb2.CreateInstanceMetadata) -register_type(data_v2_pb2.Instance) - - def _prepare_create_request(instance): """Creates a protobuf request for a CreateInstance request. @@ -232,10 +227,12 @@ def create(self): # We expect a `google.longrunning.operations_pb2.Operation`. operation_pb = self._client._instance_stub.CreateInstance(request_pb) - operation = Operation.from_pb(operation_pb, self._client) - operation.target = self - operation.caller_metadata['request_type'] = 'CreateInstance' - return operation + operation_future = operation.from_grpc( + operation_pb, + self._client._operations_stub, + data_v2_pb2.Instance, + metadata_type=messages_v2_pb2.CreateInstanceMetadata) + return operation_future def update(self): """Update this instance. diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 1fcda808db39..cfc2cb17f805 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -32,7 +32,6 @@ from google.cloud.environment_vars import BIGTABLE_EMULATOR from test_utils.retry import RetryErrors -from test_utils.retry import RetryResult from test_utils.system import EmulatorCreds from test_utils.system import unique_resource_id @@ -65,27 +64,6 @@ class Config(object): IN_EMULATOR = False -def _wait_until_complete(operation, max_attempts=5): - """Wait until an operation has completed. - - :type operation: :class:`google.cloud.operation.Operation` - :param operation: Operation that has not completed. - - :type max_attempts: int - :param max_attempts: (Optional) The maximum number of times to check if - the operation has completed. Defaults to 5. - - :rtype: bool - :returns: Boolean indicating if the operation is complete. 
- """ - - def _operation_complete(result): - return result - - retry = RetryResult(_operation_complete, max_tries=max_attempts) - return retry(operation.poll)() - - def _retry_on_unavailable(exc): """Retry only errors whose status code is 'UNAVAILABLE'.""" from grpc import StatusCode @@ -117,8 +95,7 @@ def setUpModule(): # After listing, create the test instance. created_op = Config.INSTANCE.create() - if not _wait_until_complete(created_op): - raise RuntimeError('Instance creation exceed 5 seconds.') + created_op.result(timeout=10) def tearDownModule(): @@ -166,7 +143,7 @@ def test_create_instance(self): self.instances_to_delete.append(instance) # We want to make sure the operation completes. - self.assertTrue(_wait_until_complete(operation)) + operation.result(timeout=10) # Create a new instance instance and make sure it is the same. instance_alt = Config.CLIENT.instance(ALT_INSTANCE_ID, LOCATION_ID) diff --git a/bigtable/tests/unit/test_cluster.py b/bigtable/tests/unit/test_cluster.py index 3eb18f43863d..e244b55d6dff 100644 --- a/bigtable/tests/unit/test_cluster.py +++ b/bigtable/tests/unit/test_cluster.py @@ -15,6 +15,8 @@ import unittest +import mock + class TestCluster(unittest.TestCase): @@ -232,7 +234,7 @@ def test_reload(self): def test_create(self): from google.longrunning import operations_pb2 - from google.cloud.operation import Operation + from google.cloud.future import operation from google.cloud.bigtable._generated import ( bigtable_instance_admin_pb2 as messages_v2_pb2) from tests.unit._testing import _FakeStub @@ -256,13 +258,9 @@ def test_create(self): # Perform the method and check the result. result = cluster.create() - self.assertIsInstance(result, Operation) - self.assertEqual(result.name, OP_NAME) - self.assertIs(result.target, cluster) - self.assertIs(result.client, client) + self.assertIsInstance(result, operation.Operation) + self.assertEqual(result.operation.name, OP_NAME) self.assertIsNone(result.metadata) - self.assertEqual(result.caller_metadata, - {'request_type': 'CreateCluster'}) self.assertEqual(len(stub.method_calls), 1) api_name, args, kwargs = stub.method_calls[0] @@ -278,7 +276,7 @@ def test_create(self): def test_update(self): import datetime from google.longrunning import operations_pb2 - from google.cloud.operation import Operation + from google.cloud.future import operation from google.protobuf.any_pb2 import Any from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud.bigtable._generated import ( @@ -324,15 +322,11 @@ def test_update(self): result = cluster.update() - self.assertIsInstance(result, Operation) - self.assertEqual(result.name, OP_NAME) - self.assertIs(result.target, cluster) - self.assertIs(result.client, client) + self.assertIsInstance(result, operation.Operation) + self.assertEqual(result.operation.name, OP_NAME) self.assertIsInstance(result.metadata, messages_v2_pb2.UpdateClusterMetadata) self.assertEqual(result.metadata.request_time, NOW_PB) - self.assertEqual(result.caller_metadata, - {'request_type': 'UpdateCluster'}) self.assertEqual(len(stub.method_calls), 1) api_name, args, kwargs = stub.method_calls[0] @@ -448,6 +442,7 @@ class _Client(object): def __init__(self, project): self.project = project self.project_name = 'projects/' + self.project + self._operations_stub = mock.sentinel.operations_stub def __eq__(self, other): return (other.project == self.project and diff --git a/bigtable/tests/unit/test_instance.py b/bigtable/tests/unit/test_instance.py index cdad3c376d0a..03c0034fc49e 100644 --- 
a/bigtable/tests/unit/test_instance.py +++ b/bigtable/tests/unit/test_instance.py @@ -15,6 +15,8 @@ import unittest +import mock + class TestInstance(unittest.TestCase): @@ -236,7 +238,7 @@ def test_create(self): bigtable_instance_admin_pb2 as messages_v2_pb2) from google.cloud._helpers import _datetime_to_pb_timestamp from tests.unit._testing import _FakeStub - from google.cloud.operation import Operation + from google.cloud.future import operation from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES NOW = datetime.datetime.utcnow() @@ -263,15 +265,11 @@ def test_create(self): # Perform the method and check the result. result = instance.create() - self.assertIsInstance(result, Operation) - self.assertEqual(result.name, self.OP_NAME) - self.assertIs(result.target, instance) - self.assertIs(result.client, client) + self.assertIsInstance(result, operation.Operation) + self.assertEqual(result.operation.name, self.OP_NAME) self.assertIsInstance(result.metadata, messages_v2_pb2.CreateInstanceMetadata) self.assertEqual(result.metadata.request_time, NOW_PB) - self.assertEqual(result.caller_metadata, - {'request_type': 'CreateInstance'}) self.assertEqual(len(stub.method_calls), 1) api_name, args, kwargs = stub.method_calls[0] @@ -291,7 +289,7 @@ def test_create_w_explicit_serve_nodes(self): from google.cloud.bigtable._generated import ( bigtable_instance_admin_pb2 as messages_v2_pb2) from tests.unit._testing import _FakeStub - from google.cloud.operation import Operation + from google.cloud.future import operation SERVE_NODES = 5 @@ -308,10 +306,8 @@ def test_create_w_explicit_serve_nodes(self): # Perform the method and check the result. result = instance.create() - self.assertIsInstance(result, Operation) - self.assertEqual(result.name, self.OP_NAME) - self.assertIs(result.target, instance) - self.assertIs(result.client, client) + self.assertIsInstance(result, operation.Operation) + self.assertEqual(result.operation.name, self.OP_NAME) self.assertEqual(len(stub.method_calls), 1) api_name, args, kwargs = stub.method_calls[0] @@ -582,6 +578,7 @@ class _Client(object): def __init__(self, project): self.project = project self.project_name = 'projects/' + self.project + self._operations_stub = mock.sentinel.operations_stub def copy(self): from copy import deepcopy From 93d36f43bbc9af93bad40f785585897161b0c53d Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 20 Jul 2017 13:08:32 -0700 Subject: [PATCH 38/62] Update auto-gen code for video intelligence (#3643) --- .../video_intelligence_service_client.py | 1 - ...eo_intelligence_service_client_config.json | 4 +- .../v1beta1/video_intelligence_pb2.py | 235 +++++++++++++++++- .../v1beta1/video_intelligence_pb2_grpc.py | 2 - .../cloud/videointelligence_v1beta1/types.py | 1 + 5 files changed, 235 insertions(+), 8 deletions(-) diff --git a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client.py b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client.py index 733a95c3240a..5baae515c69c 100644 --- a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client.py +++ b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client.py @@ -225,7 +225,6 @@ def annotate_video(self, :exc:`google.gax.errors.GaxError` if the RPC is aborted. :exc:`ValueError` if the parameters are invalid. """ - # Create the request object. 
request = video_intelligence_pb2.AnnotateVideoRequest( input_uri=input_uri, features=features, diff --git a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client_config.json b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client_config.json index 7dd61bbb7b5d..996b2ab5e30b 100644 --- a/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client_config.json +++ b/videointelligence/google/cloud/gapic/videointelligence/v1beta1/video_intelligence_service_client_config.json @@ -6,9 +6,7 @@ "DEADLINE_EXCEEDED", "UNAVAILABLE" ], - "non_idempotent": [ - "UNAVAILABLE" - ] + "non_idempotent": [] }, "retry_params": { "default": { diff --git a/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2.py b/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2.py index 9046090f92d7..7eb2e71d7e05 100644 --- a/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2.py +++ b/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2.py @@ -884,6 +884,47 @@ AnnotateVideoRequest = _reflection.GeneratedProtocolMessageType('AnnotateVideoRequest', (_message.Message,), dict( DESCRIPTOR = _ANNOTATEVIDEOREQUEST, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video annotation request. + + + Attributes: + input_uri: + Input video location. Currently, only `Google Cloud Storage + `__ URIs are supported, + which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return [google + .rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT + ]). For more information, see `Request URIs + `__. A video URI may include + wildcards in ``object-id``, and thus identify multiple videos. + Supported wildcards: '\*' to match 0 or more characters; '?' + to match 1 character. If unset, the input video should be + embedded in the request as ``input_content``. If set, + ``input_content`` should be unset. + input_content: + The video data bytes. Encoding: base64. If unset, the input + video(s) should be specified via ``input_uri``. If set, + ``input_uri`` should be unset. + features: + Requested video annotation features. + video_context: + Additional video context and/or feature-specific parameters. + output_uri: + Optional location where the output (in JSON format) should be + stored. Currently, only `Google Cloud Storage + `__ URIs are supported, + which must be specified in the following format: + ``gs://bucket-id/object-id`` (other URI formats return [google + .rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT + ]). For more information, see `Request URIs + `__. + location_id: + Optional cloud region where annotation should take place. + Supported cloud regions: ``us-east1``, ``us-west1``, ``europe- + west1``, ``asia-east1``. If no region is specified, a region + will be determined based on video file location. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoRequest) )) _sym_db.RegisterMessage(AnnotateVideoRequest) @@ -891,6 +932,36 @@ VideoContext = _reflection.GeneratedProtocolMessageType('VideoContext', (_message.Message,), dict( DESCRIPTOR = _VIDEOCONTEXT, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video context and/or feature-specific parameters. 
+ + + Attributes: + segments: + Video segments to annotate. The segments may overlap and are + not required to be contiguous or span the whole video. If + unspecified, each video is treated as a single segment. + label_detection_mode: + If label detection has been requested, what labels should be + detected in addition to video-level labels or segment-level + labels. If unspecified, defaults to ``SHOT_MODE``. + stationary_camera: + Whether the video has been shot from a stationary (i.e. non- + moving) camera. When set to true, might improve detection + accuracy for moving objects. + label_detection_model: + Model to use for label detection. Supported values: "latest" + and "stable" (the default). + face_detection_model: + Model to use for face detection. Supported values: "latest" + and "stable" (the default). + shot_change_detection_model: + Model to use for shot change detection. Supported values: + "latest" and "stable" (the default). + safe_search_detection_model: + Model to use for safe search detection. Supported values: + "latest" and "stable" (the default). + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoContext) )) _sym_db.RegisterMessage(VideoContext) @@ -898,6 +969,16 @@ VideoSegment = _reflection.GeneratedProtocolMessageType('VideoSegment', (_message.Message,), dict( DESCRIPTOR = _VIDEOSEGMENT, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video segment. + + + Attributes: + start_time_offset: + Start offset in microseconds (inclusive). Unset means 0. + end_time_offset: + End offset in microseconds (inclusive). Unset means 0. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoSegment) )) _sym_db.RegisterMessage(VideoSegment) @@ -905,6 +986,21 @@ LabelLocation = _reflection.GeneratedProtocolMessageType('LabelLocation', (_message.Message,), dict( DESCRIPTOR = _LABELLOCATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Label location. + + + Attributes: + segment: + Video segment. Set to [-1, -1] for video-level labels. Set to + [timestamp, timestamp] for frame-level labels. Otherwise, + corresponds to one of ``AnnotateSpec.segments`` (if specified) + or to shot boundaries (if requested). + confidence: + Confidence that the label is accurate. Range: [0, 1]. + level: + Label level. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.LabelLocation) )) _sym_db.RegisterMessage(LabelLocation) @@ -912,6 +1008,18 @@ LabelAnnotation = _reflection.GeneratedProtocolMessageType('LabelAnnotation', (_message.Message,), dict( DESCRIPTOR = _LABELANNOTATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Label annotation. + + + Attributes: + description: + Textual description, e.g. ``Fixed-gear bicycle``. + language_code: + Language code for ``description`` in BCP-47 format. + locations: + Where the label was detected and with what confidence. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.LabelAnnotation) )) _sym_db.RegisterMessage(LabelAnnotation) @@ -919,6 +1027,29 @@ SafeSearchAnnotation = _reflection.GeneratedProtocolMessageType('SafeSearchAnnotation', (_message.Message,), dict( DESCRIPTOR = _SAFESEARCHANNOTATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Safe search annotation (based on per-frame visual signals only). 
If no + unsafe content has been detected in a frame, no annotations are present + for that frame. If only some types of unsafe content have been detected + in a frame, the likelihood is set to ``UNKNOWN`` for all other types of + unsafe content. + + + Attributes: + adult: + Likelihood of adult content. + spoof: + Likelihood that an obvious modification was made to the + original version to make it appear funny or offensive. + medical: + Likelihood of medical content. + violent: + Likelihood of violent content. + racy: + Likelihood of racy content. + time_offset: + Video time offset in microseconds. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.SafeSearchAnnotation) )) _sym_db.RegisterMessage(SafeSearchAnnotation) @@ -926,6 +1057,20 @@ BoundingBox = _reflection.GeneratedProtocolMessageType('BoundingBox', (_message.Message,), dict( DESCRIPTOR = _BOUNDINGBOX, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Bounding box. + + + Attributes: + left: + Left X coordinate. + right: + Right X coordinate. + bottom: + Bottom Y coordinate. + top: + Top Y coordinate. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.BoundingBox) )) _sym_db.RegisterMessage(BoundingBox) @@ -933,6 +1078,16 @@ FaceLocation = _reflection.GeneratedProtocolMessageType('FaceLocation', (_message.Message,), dict( DESCRIPTOR = _FACELOCATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Face location. + + + Attributes: + bounding_box: + Bounding box in a frame. + time_offset: + Video time offset in microseconds. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.FaceLocation) )) _sym_db.RegisterMessage(FaceLocation) @@ -940,6 +1095,21 @@ FaceAnnotation = _reflection.GeneratedProtocolMessageType('FaceAnnotation', (_message.Message,), dict( DESCRIPTOR = _FACEANNOTATION, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Face annotation. + + + Attributes: + thumbnail: + Thumbnail of a representative face view (in JPEG format). + Encoding: base64. + segments: + All locations where a face was detected. Faces are detected + and tracked on a per-video basis (as opposed to across + multiple videos). + locations: + Face locations at one frame per second. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.FaceAnnotation) )) _sym_db.RegisterMessage(FaceAnnotation) @@ -947,6 +1117,29 @@ VideoAnnotationResults = _reflection.GeneratedProtocolMessageType('VideoAnnotationResults', (_message.Message,), dict( DESCRIPTOR = _VIDEOANNOTATIONRESULTS, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Annotation results for a single video. + + + Attributes: + input_uri: + Video file location in `Google Cloud Storage + `__. + label_annotations: + Label annotations. There is exactly one element for each + unique label. + face_annotations: + Face annotations. There is exactly one element for each unique + face. + shot_annotations: + Shot annotations. Each shot is represented as a video segment. + safe_search_annotations: + Safe search annotations. + error: + If set, indicates an error. Note that for a single + ``AnnotateVideoRequest`` some videos may succeed and some may + fail. 
+ """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoAnnotationResults) )) _sym_db.RegisterMessage(VideoAnnotationResults) @@ -954,6 +1147,17 @@ AnnotateVideoResponse = _reflection.GeneratedProtocolMessageType('AnnotateVideoResponse', (_message.Message,), dict( DESCRIPTOR = _ANNOTATEVIDEORESPONSE, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video annotation response. Included in the ``response`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + + Attributes: + annotation_results: + Annotation results for all videos specified in + ``AnnotateVideoRequest``. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoResponse) )) _sym_db.RegisterMessage(AnnotateVideoResponse) @@ -961,6 +1165,22 @@ VideoAnnotationProgress = _reflection.GeneratedProtocolMessageType('VideoAnnotationProgress', (_message.Message,), dict( DESCRIPTOR = _VIDEOANNOTATIONPROGRESS, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Annotation progress for a single video. + + + Attributes: + input_uri: + Video file location in `Google Cloud Storage + `__. + progress_percent: + Approximate percentage processed thus far. Guaranteed to be + 100 when fully processed. + start_time: + Time when the request was received. + update_time: + Time of the most recent update. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoAnnotationProgress) )) _sym_db.RegisterMessage(VideoAnnotationProgress) @@ -968,6 +1188,17 @@ AnnotateVideoProgress = _reflection.GeneratedProtocolMessageType('AnnotateVideoProgress', (_message.Message,), dict( DESCRIPTOR = _ANNOTATEVIDEOPROGRESS, __module__ = 'google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2' + , + __doc__ = """Video annotation progress. Included in the ``metadata`` field of the + ``Operation`` returned by the ``GetOperation`` call of the + ``google::longrunning::Operations`` service. + + + Attributes: + annotation_progress: + Progress metadata for all videos specified in + ``AnnotateVideoRequest``. + """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoProgress) )) _sym_db.RegisterMessage(AnnotateVideoProgress) @@ -979,10 +1210,10 @@ # THESE ELEMENTS WILL BE DEPRECATED. # Please use the generated *_pb2_grpc.py files instead. import grpc - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities from grpc.beta import implementations as beta_implementations from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities class VideoIntelligenceServiceStub(object): diff --git a/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2_grpc.py b/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2_grpc.py index 4ea0e1df20f5..ca09db976c12 100644 --- a/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2_grpc.py +++ b/videointelligence/google/cloud/proto/videointelligence/v1beta1/video_intelligence_pb2_grpc.py @@ -1,7 +1,5 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
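# (The regeneration below also drops the unused ``grpc.framework`` beta
# imports; plain ``grpc`` is all the generated stub requires.)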
 import grpc -from grpc.framework.common import cardinality -from grpc.framework.interfaces.face import utilities as face_utilities import google.cloud.proto.videointelligence.v1beta1.video_intelligence_pb2 as google_dot_cloud_dot_proto_dot_videointelligence_dot_v1beta1_dot_video__intelligence__pb2 import google.longrunning.operations_pb2 as google_dot_longrunning_dot_operations__pb2 diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/types.py b/videointelligence/google/cloud/videointelligence_v1beta1/types.py index 9ac3b8a6b2a5..bfc99c3ab24b 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/types.py +++ b/videointelligence/google/cloud/videointelligence_v1beta1/types.py @@ -21,6 +21,7 @@ names = [] for name, message in get_messages(video_intelligence_pb2).items(): + message.__module__ = 'google.cloud.videointelligence_v1beta1.types' setattr(sys.modules[__name__], name, message) names.append(name) From 7f1a6ba3d262a75d3863026ba50f58d55ecc07d4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 20 Jul 2017 18:18:02 -0400 Subject: [PATCH 39/62] Add systests for read/query w/ concurrent updates. (#3632) * Also add systest for user exception aborting transaction. --- spanner/tests/_fixtures.py | 4 + spanner/tests/system/test_system.py | 116 ++++++++++++++++++++++++++++ 2 files changed, 120 insertions(+) diff --git a/spanner/tests/_fixtures.py b/spanner/tests/_fixtures.py index 1123d03c3f2d..ace9b981b6ec 100644 --- a/spanner/tests/_fixtures.py +++ b/spanner/tests/_fixtures.py @@ -38,6 +38,10 @@ description STRING(16), exactly_hwhen TIMESTAMP) PRIMARY KEY (eye_d); +CREATE TABLE counters ( + name STRING(1024), + value INT64 ) + PRIMARY KEY (name); """ DDL_STATEMENTS = [stmt.strip() for stmt in DDL.split(';') if stmt.strip()] diff --git a/spanner/tests/system/test_system.py b/spanner/tests/system/test_system.py index b4ac62194bb1..e6d73f977e94 100644 --- a/spanner/tests/system/test_system.py +++ b/spanner/tests/system/test_system.py @@ -17,6 +17,7 @@ import operator import os import struct +import threading import unittest from google.cloud.proto.spanner.v1.type_pb2 import ARRAY @@ -358,6 +359,11 @@ class TestSessionAPI(unittest.TestCase, _TestData): 'description', 'exactly_hwhen', ) + COUNTERS_TABLE = 'counters' + COUNTERS_COLUMNS = ( + 'name', + 'value', + ) SOME_DATE = datetime.date(2011, 1, 17) SOME_TIME = datetime.datetime(1989, 1, 17, 17, 59, 12, 345612) NANO_TIME = TimestampWithNanoseconds(1995, 8, 31, nanosecond=987654321) @@ -482,6 +488,31 @@ def test_transaction_read_and_insert_then_rollback(self): rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL)) self.assertEqual(rows, []) + def _transaction_read_then_raise(self, transaction): + rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL)) + self.assertEqual(len(rows), 0) + transaction.insert(self.TABLE, self.COLUMNS, self.ROW_DATA) + raise CustomException() + + @RetryErrors(exception=GrpcRendezvous) + def test_transaction_read_and_insert_then_exception(self): + retry = RetryInstanceState(_has_all_ddl) + retry(self._db.reload)() + + session = self._db.session() + session.create() + self.to_delete.append(session) + + with session.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + with self.assertRaises(CustomException): + session.run_in_transaction(self._transaction_read_then_raise) + + # Transaction was rolled back.
+ rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL)) + self.assertEqual(rows, []) + @RetryErrors(exception=GrpcRendezvous) def test_transaction_read_and_insert_or_update_then_commit(self): retry = RetryInstanceState(_has_all_ddl) @@ -508,6 +539,87 @@ def test_transaction_read_and_insert_or_update_then_commit(self): rows = list(session.read(self.TABLE, self.COLUMNS, self.ALL)) self._check_row_data(rows) + def _transaction_concurrency_helper(self, unit_of_work, pkey): + INITIAL_VALUE = 123 + NUM_THREADS = 3 # conforms to equivalent Java systest. + + retry = RetryInstanceState(_has_all_ddl) + retry(self._db.reload)() + + session = self._db.session() + session.create() + self.to_delete.append(session) + + with session.batch() as batch: + batch.insert_or_update( + self.COUNTERS_TABLE, + self.COUNTERS_COLUMNS, + [[pkey, INITIAL_VALUE]]) + + # We don't want to run the threads' transactions in the current + # session, which would fail. + txn_sessions = [] + + for _ in range(NUM_THREADS): + txn_session = self._db.session() + txn_sessions.append(txn_session) + txn_session.create() + self.to_delete.append(txn_session) + + threads = [ + threading.Thread( + target=txn_session.run_in_transaction, + args=(unit_of_work, pkey)) + for txn_session in txn_sessions] + + for thread in threads: + thread.start() + + for thread in threads: + thread.join() + + keyset = KeySet(keys=[(pkey,)]) + rows = list(session.read( + self.COUNTERS_TABLE, self.COUNTERS_COLUMNS, keyset)) + self.assertEqual(len(rows), 1) + _, value = rows[0] + self.assertEqual(value, INITIAL_VALUE + len(threads)) + + def _read_w_concurrent_update(self, transaction, pkey): + keyset = KeySet(keys=[(pkey,)]) + rows = list(transaction.read( + self.COUNTERS_TABLE, self.COUNTERS_COLUMNS, keyset)) + self.assertEqual(len(rows), 1) + pkey, value = rows[0] + transaction.update( + self.COUNTERS_TABLE, + self.COUNTERS_COLUMNS, + [[pkey, value + 1]]) + + def test_transaction_read_w_concurrent_updates(self): + PKEY = 'read_w_concurrent_updates' + self._transaction_concurrency_helper( + self._read_w_concurrent_update, PKEY) + + def _query_w_concurrent_update(self, transaction, pkey): + SQL = 'SELECT * FROM counters WHERE name = @name' + rows = list(transaction.execute_sql( + SQL, + params={'name': pkey}, + param_types={'name': Type(code=STRING)}, + )) + self.assertEqual(len(rows), 1) + pkey, value = rows[0] + transaction.update( + self.COUNTERS_TABLE, + self.COUNTERS_COLUMNS, + [[pkey, value + 1]]) + + def test_transaction_query_w_concurrent_updates(self): + PKEY = 'query_w_concurrent_updates' + self._transaction_concurrency_helper( + self._query_w_concurrent_update, PKEY) + @staticmethod def _row_data(max_index): for index in range(max_index): @@ -910,6 +1022,10 @@ def test_four_meg(self): self._verify_two_columns(FOUR_MEG) +class CustomException(Exception): + """Placeholder for any user-defined exception.""" + + class _DatabaseDropper(object): """Helper for cleaning up databases created on-the-fly.""" From 322dd75daf746cb6442257c1830426c0c5b3a2aa Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Jul 2017 14:41:13 -0700 Subject: [PATCH 40/62] Switched to google-resumable-media in BigQuery. (#3555) * Switched to google-resumable-media in BigQuery. * Upgrading google-resumable-media dependency to 0.2.1. 
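For orientation, a minimal usage sketch of the rewritten upload path; the ``table`` object and the CSV file name are illustrative assumptions, not part of this patch:

    # ``upload_from_file`` now delegates to google-resumable-media: a
    # single multipart request when ``size`` is given, a chunked
    # resumable session otherwise.
    with open('rows.csv', 'rb') as stream:
        job = table.upload_from_file(stream, source_format='CSV', rewind=True)
    # The returned load job is already started; there is no need to call
    # ``job.begin()``.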
--- bigquery/google/cloud/bigquery/table.py | 432 ++++++++---- bigquery/nox.py | 33 +- bigquery/setup.py | 3 + bigquery/tests/unit/test_table.py | 872 ++++++++++++++---------- storage/google/cloud/storage/blob.py | 1 + storage/setup.py | 2 +- storage/tests/unit/test_blob.py | 8 +- 7 files changed, 867 insertions(+), 484 deletions(-) diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index 7e21e35d1fb0..f7752bb8fc36 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -15,22 +15,21 @@ """Define API Datasets.""" import datetime -import json import os import httplib2 import six +import google.auth.transport.requests +from google import resumable_media +from google.resumable_media.requests import MultipartUpload +from google.resumable_media.requests import ResumableUpload + from google.cloud._helpers import _datetime_from_microseconds from google.cloud._helpers import _millis_from_datetime from google.cloud.exceptions import NotFound from google.cloud.exceptions import make_exception from google.cloud.iterator import HTTPIterator -from google.cloud.streaming.exceptions import HttpError -from google.cloud.streaming.http_wrapper import Request -from google.cloud.streaming.http_wrapper import make_api_request -from google.cloud.streaming.transfer import RESUMABLE_UPLOAD -from google.cloud.streaming.transfer import Upload from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery._helpers import _item_to_row from google.cloud.bigquery._helpers import _rows_page_start @@ -39,6 +38,17 @@ _TABLE_HAS_NO_SCHEMA = "Table has no schema: call 'table.reload()'" _MARKER = object() +_DEFAULT_CHUNKSIZE = 1048576 # 1024 * 1024 B = 1 MB +_BASE_UPLOAD_TEMPLATE = ( + u'https://www.googleapis.com/upload/bigquery/v2/projects/' + u'{project}/jobs?uploadType=') +_MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u'multipart' +_RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u'resumable' +_GENERIC_CONTENT_TYPE = u'*/*' +_READ_LESS_THAN_SIZE = ( + 'Size {:d} was specified but the file-like object only had ' + '{:d} bytes remaining.') +_DEFAULT_NUM_RETRIES = 6 class Table(object): @@ -815,15 +825,177 @@ def insert_data(self, return errors - @staticmethod - def _check_response_error(request, http_response): - """Helper for :meth:`upload_from_file`.""" - info = http_response.info - status = int(info['status']) - if not 200 <= status < 300: - faux_response = httplib2.Response({'status': status}) - raise make_exception(faux_response, http_response.content, - error_info=request.url) + def _make_transport(self, client): + """Make an authenticated transport with a client's credentials. + + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: The client to use. + + :rtype: + :class:`~google.auth.transport.requests.AuthorizedSession` + :returns: The transport (with credentials) that will + make authenticated requests. + """ + # Create a ``requests`` transport with the client's credentials. + transport = google.auth.transport.requests.AuthorizedSession( + client._credentials) + return transport + + def _initiate_resumable_upload(self, client, stream, + metadata, num_retries): + """Initiate a resumable upload. + + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: The client to use. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :type metadata: dict + :param metadata: The metadata associated with the upload.
+ + :type num_retries: int + :param num_retries: Number of upload retries. (Deprecated: This + argument will be removed in a future release.) + + :rtype: tuple + :returns: + Pair of + + * The :class:`~google.resumable_media.requests.ResumableUpload` + that was created + * The ``transport`` used to initiate the upload. + """ + chunk_size = _DEFAULT_CHUNKSIZE + transport = self._make_transport(client) + headers = _get_upload_headers(client._connection.USER_AGENT) + upload_url = _RESUMABLE_URL_TEMPLATE.format(project=self.project) + upload = ResumableUpload(upload_url, chunk_size, headers=headers) + + if num_retries is not None: + upload._retry_strategy = resumable_media.RetryStrategy( + max_retries=num_retries) + + upload.initiate( + transport, stream, metadata, _GENERIC_CONTENT_TYPE, + stream_final=False) + + return upload, transport + + def _do_resumable_upload(self, client, stream, metadata, num_retries): + """Perform a resumable upload. + + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: The client to use. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :type metadata: dict + :param metadata: The metadata associated with the upload. + + :type num_retries: int + :param num_retries: Number of upload retries. (Deprecated: This + argument will be removed in a future release.) + + :rtype: :class:`~requests.Response` + :returns: The "200 OK" response object returned after the final chunk + is uploaded. + """ + upload, transport = self._initiate_resumable_upload( + client, stream, metadata, num_retries) + + while not upload.finished: + response = upload.transmit_next_chunk(transport) + + return response + + def _do_multipart_upload(self, client, stream, metadata, + size, num_retries): + """Perform a multipart upload. + + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: The client to use. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :type metadata: dict + :param metadata: The metadata associated with the upload. + + :type size: int + :param size: The number of bytes to be uploaded (which will be read + from ``stream``). If not provided, the upload will be + concluded once ``stream`` is exhausted (or :data:`None`). + + :type num_retries: int + :param num_retries: Number of upload retries. (Deprecated: This + argument will be removed in a future release.) + + :rtype: :class:`~requests.Response` + :returns: The "200 OK" response object returned after the multipart + upload request. + :raises: :exc:`ValueError` if the ``stream`` has fewer than ``size`` + bytes remaining. + """ + data = stream.read(size) + if len(data) < size: + msg = _READ_LESS_THAN_SIZE.format(size, len(data)) + raise ValueError(msg) + + transport = self._make_transport(client) + headers = _get_upload_headers(client._connection.USER_AGENT) + + upload_url = _MULTIPART_URL_TEMPLATE.format(project=self.project) + upload = MultipartUpload(upload_url, headers=headers) + + if num_retries is not None: + upload._retry_strategy = resumable_media.RetryStrategy( + max_retries=num_retries) + + response = upload.transmit( + transport, data, metadata, _GENERIC_CONTENT_TYPE) + + return response + + def _do_upload(self, client, stream, metadata, size, num_retries): + """Determine an upload strategy and then perform the upload. + + If ``size`` is :data:`None`, then a resumable upload will be used, + otherwise the content and the metadata will be uploaded + in a single multipart upload request. 
+ + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: The client to use. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :type metadata: dict + :param metadata: The metadata associated with the upload. + + :type size: int + :param size: The number of bytes to be uploaded (which will be read + from ``stream``). If not provided, the upload will be + concluded once ``stream`` is exhausted (or :data:`None`). + + :type num_retries: int + :param num_retries: Number of upload retries. (Deprecated: This + argument will be removed in a future release.) + + :rtype: dict + :returns: The parsed JSON from the "200 OK" response. This will be the + **only** response in the multipart case and it will be the + **final** response in the resumable case. + """ + if size is None: + response = self._do_resumable_upload( + client, stream, metadata, num_retries) + else: + response = self._do_multipart_upload( + client, stream, metadata, size, num_retries) + + return response.json() # pylint: disable=too-many-arguments,too-many-locals def upload_from_file(self, @@ -831,7 +1003,7 @@ def upload_from_file(self, source_format, rewind=False, size=None, - num_retries=6, + num_retries=_DEFAULT_NUM_RETRIES, allow_jagged_rows=None, allow_quoted_newlines=None, create_disposition=None, @@ -846,10 +1018,6 @@ def upload_from_file(self, job_name=None): """Upload the contents of this table from a file-like object. - The content type of the upload will either be - - The value passed in to the function (if any) - - ``text/csv``. - :type file_obj: file :param file_obj: A file handle opened in binary mode for reading. @@ -860,7 +1028,7 @@ def upload_from_file(self, :type rewind: bool :param rewind: If True, seek to the beginning of the file handle before - writing the file to Cloud Storage. + writing the file. :type size: int :param size: The number of bytes to read from the file handle. @@ -911,16 +1079,16 @@ def upload_from_file(self, :param write_disposition: job configuration option; see :meth:`google.cloud.bigquery.job.LoadJob`. - :type client: :class:`~google.cloud.storage.client.Client` or - ``NoneType`` - :param client: Optional. The client to use. If not passed, falls back - to the ``client`` stored on the current dataset. + :type client: :class:`~google.cloud.bigquery.client.Client` + :param client: (Optional) The client to use. If not passed, falls back + to the ``client`` stored on the current table. :type job_name: str :param job_name: Optional. The id of the job. Generated if not explicitly passed in. - :rtype: :class:`google.cloud.bigquery.jobs.LoadTableFromStorageJob` + :rtype: :class:`~google.cloud.bigquery.jobs.LoadTableFromStorageJob` + :returns: the job instance used to load the data (e.g., for querying status). Note that the job is already started: do not call ``job.begin()``. @@ -929,54 +1097,10 @@ def upload_from_file(self, a file opened in text mode. """ client = self._require_client(client) - connection = client._connection - content_type = 'application/octet-stream' - - # Rewind the file if desired. - if rewind: - file_obj.seek(0, os.SEEK_SET) - - mode = getattr(file_obj, 'mode', None) - - if mode is not None and mode not in ('rb', 'r+b', 'rb+'): - raise ValueError( - "Cannot upload files opened in text mode: use " - "open(filename, mode='rb') or open(filename, mode='r+b')") - - # Get the basic stats about the file. 
- total_bytes = size - if total_bytes is None: - if hasattr(file_obj, 'fileno'): - total_bytes = os.fstat(file_obj.fileno()).st_size - else: - raise ValueError('total bytes could not be determined. Please ' - 'pass an explicit size.') - headers = { - 'Accept': 'application/json', - 'Accept-Encoding': 'gzip, deflate', - 'User-Agent': connection.USER_AGENT, - 'content-type': 'application/json', - } - - metadata = { - 'configuration': { - 'load': { - 'sourceFormat': source_format, - 'destinationTable': { - 'projectId': self._dataset.project, - 'datasetId': self._dataset.name, - 'tableId': self.name, - } - } - } - } - - if len(self._schema) > 0: - load_config = metadata['configuration']['load'] - load_config['schema'] = { - 'fields': _build_schema_resource(self._schema) - } - + _maybe_rewind(file_obj, rewind=rewind) + _check_mode(file_obj) + metadata = _get_upload_metadata( + source_format, self._schema, self._dataset, self.name) _configure_job_metadata(metadata, allow_jagged_rows, allow_quoted_newlines, create_disposition, encoding, field_delimiter, @@ -984,47 +1108,12 @@ def upload_from_file(self, quote_character, skip_leading_rows, write_disposition, job_name) - upload = Upload(file_obj, content_type, total_bytes, - auto_transfer=False) - - url_builder = _UrlBuilder() - upload_config = _UploadConfig() - - # Base URL may change once we know simple vs. resumable. - base_url = connection.API_BASE_URL + '/upload' - path = '/projects/%s/jobs' % (self._dataset.project,) - upload_url = connection.build_api_url(api_base_url=base_url, path=path) - - # Use apitools 'Upload' facility. - request = Request(upload_url, 'POST', headers, - body=json.dumps(metadata)) - - upload.configure_request(upload_config, request, url_builder) - query_params = url_builder.query_params - base_url = connection.API_BASE_URL + '/upload' - request.url = connection.build_api_url(api_base_url=base_url, - path=path, - query_params=query_params) try: - upload.initialize_upload(request, connection.http) - except HttpError as err_response: - faux_response = httplib2.Response(err_response.response) - raise make_exception(faux_response, err_response.content, - error_info=request.url) - - if upload.strategy == RESUMABLE_UPLOAD: - http_response = upload.stream_file(use_chunks=True) - else: - http_response = make_api_request(connection.http, request, - retries=num_retries) - - self._check_response_error(request, http_response) - - response_content = http_response.content - if not isinstance(response_content, - six.string_types): # pragma: NO COVER Python3 - response_content = response_content.decode('utf-8') - return client.job_from_resource(json.loads(response_content)) + created_json = self._do_upload( + client, file_obj, metadata, size, num_retries) + return client.job_from_resource(created_json) + except resumable_media.InvalidResponse as exc: + _raise_from_invalid_response(exc) # pylint: enable=too-many-arguments,too-many-locals @@ -1122,20 +1211,109 @@ def _build_schema_resource(fields): info['fields'] = _build_schema_resource(field.fields) infos.append(info) return infos +# pylint: enable=unused-argument + +def _maybe_rewind(stream, rewind=False): + """Rewind the stream if desired. 
-class _UploadConfig(object): - """Faux message FBO apitools' 'configure_request'.""" - accept = ['*/*'] - max_size = None - resumable_multipart = True - resumable_path = u'/upload/bigquery/v2/projects/{project}/jobs' - simple_multipart = True - simple_path = u'/upload/bigquery/v2/projects/{project}/jobs' + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + :type rewind: bool + :param rewind: Indicates if we should seek to the beginning of the stream. + """ + if rewind: + stream.seek(0, os.SEEK_SET) -class _UrlBuilder(object): - """Faux builder FBO apitools' 'configure_request'""" - def __init__(self): - self.query_params = {} - self._relative_path = '' + +def _check_mode(stream): + """Check that a stream was opened in read-binary mode. + + :type stream: IO[bytes] + :param stream: A bytes IO object open for reading. + + :raises: :exc:`ValueError` if the ``stream.mode`` is a valid attribute + and is not among ``rb``, ``r+b`` or ``rb+``. + """ + mode = getattr(stream, 'mode', None) + + if mode is not None and mode not in ('rb', 'r+b', 'rb+'): + raise ValueError( + "Cannot upload files opened in text mode: use " + "open(filename, mode='rb') or open(filename, mode='r+b')") + + +def _get_upload_headers(user_agent): + """Get the headers for an upload request. + + :type user_agent: str + :param user_agent: The user-agent for requests. + + :rtype: dict + :returns: The headers to be used for the request. + """ + return { + 'Accept': 'application/json', + 'Accept-Encoding': 'gzip, deflate', + 'User-Agent': user_agent, + 'content-type': 'application/json', + } + + +def _get_upload_metadata(source_format, schema, dataset, name): + """Get base metadata for creating a table. + + :type source_format: str + :param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'. + job configuration option. + + :type schema: list + :param schema: List of :class:`SchemaField` associated with a table. + + :type dataset: :class:`~google.cloud.bigquery.dataset.Dataset` + :param dataset: A dataset which contains a table. + + :type name: str + :param name: The name of the table. + + :rtype: dict + :returns: The metadata dictionary. + """ + load_config = { + 'sourceFormat': source_format, + 'destinationTable': { + 'projectId': dataset.project, + 'datasetId': dataset.name, + 'tableId': name, + }, + } + if schema: + load_config['schema'] = { + 'fields': _build_schema_resource(schema), + } + + return { + 'configuration': { + 'load': load_config, + }, + } + + +def _raise_from_invalid_response(error, error_info=None): + """Re-wrap and raise an ``InvalidResponse`` exception. + + :type error: :exc:`google.resumable_media.InvalidResponse` + :param error: A caught exception from the ``google-resumable-media`` + library. + + :type error_info: str + :param error_info: (Optional) Extra information about the failed request. + + :raises: :class:`~google.cloud.exceptions.GoogleCloudError` corresponding + to the failed status code + """ + response = error.response + faux_response = httplib2.Response({'status': response.status_code}) + raise make_exception(faux_response, response.content, + error_info=error_info, use_json=False) diff --git a/bigquery/nox.py b/bigquery/nox.py index 19a8f5761701..989965443159 100644 --- a/bigquery/nox.py +++ b/bigquery/nox.py @@ -19,7 +19,9 @@ import nox -LOCAL_DEPS = ('../core/',) +LOCAL_DEPS = ( + os.path.join('..', 'core'), +) @nox.session @@ -38,10 +40,17 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. 
- session.run('py.test', '--quiet', - '--cov=google.cloud.bigquery', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', + '--quiet', + '--cov=google.cloud.bigquery', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + os.path.join('tests', 'unit'), + *session.posargs ) @@ -63,11 +72,19 @@ def system_tests(session, python_version): # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) - session.install('../storage/', '../test_utils/') + session.install( + os.path.join('..', 'storage'), + os.path.join('..', 'test_utils'), + ) session.install('.') # Run py.test against the system tests. - session.run('py.test', '--quiet', 'tests/system.py') + session.run( + 'py.test', + '--quiet', + os.path.join('tests', 'system.py'), + *session.posargs + ) @nox.session @@ -81,7 +98,7 @@ def lint(session): session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') - session.run('flake8', 'google/cloud/bigquery') + session.run('flake8', os.path.join('google', 'cloud', 'bigquery')) session.run('flake8', 'tests') session.run( 'gcp-devrel-py-tools', 'run-pylint', diff --git a/bigquery/setup.py b/bigquery/setup.py index 6d61064c88ba..eeb2d90549d8 100644 --- a/bigquery/setup.py +++ b/bigquery/setup.py @@ -52,6 +52,9 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-auth >= 1.0.0', + 'google-resumable-media >= 0.2.1', + 'requests >= 2.0.0', ] setup( diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index f535e8799628..502c0495f9c9 100644 --- a/bigquery/tests/unit/test_table.py +++ b/bigquery/tests/unit/test_table.py @@ -12,8 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
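+# NOTE: the io/json/mock/http_client/pytest imports added below support the
+# rewritten upload tests, which drive in-memory byte streams through mocked
+# transports instead of the old apitools-based fixtures.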
+import email +import io +import json import unittest +import mock +from six.moves import http_client +import pytest + class _SchemaBase(object): @@ -31,7 +38,8 @@ def _verifySchema(self, schema, resource): class TestTable(unittest.TestCase, _SchemaBase): - PROJECT = 'project' + + PROJECT = 'prahj-ekt' DS_NAME = 'dataset-name' TABLE_NAME = 'table-name' @@ -1553,312 +1561,476 @@ def _row_data(row): self.assertEqual(req['path'], '/%s' % PATH) self.assertEqual(req['data'], SENT) - def test_upload_from_file_text_mode_file_failure(self): + @mock.patch('google.auth.transport.requests.AuthorizedSession') + def test__make_transport(self, session_factory): + client = mock.Mock(spec=[u'_credentials']) + table = self._make_one(self.TABLE_NAME, None) + transport = table._make_transport(client) - class TextModeFile(object): - mode = 'r' + self.assertIs(transport, session_factory.return_value) + session_factory.assert_called_once_with(client._credentials) - conn = _Connection() - client = _Client(project=self.PROJECT, connection=conn) + @staticmethod + def _mock_requests_response(status_code, headers, content=b''): + return mock.Mock( + content=content, headers=headers, status_code=status_code, + spec=['content', 'headers', 'status_code']) + + def _mock_transport(self, status_code, headers, content=b''): + fake_transport = mock.Mock(spec=['request']) + fake_response = self._mock_requests_response( + status_code, headers, content=content) + fake_transport.request.return_value = fake_response + return fake_transport + + def _initiate_resumable_upload_helper(self, num_retries=None): + from google.resumable_media.requests import ResumableUpload + from google.cloud.bigquery.table import _DEFAULT_CHUNKSIZE + from google.cloud.bigquery.table import _GENERIC_CONTENT_TYPE + from google.cloud.bigquery.table import _get_upload_headers + from google.cloud.bigquery.table import _get_upload_metadata + + connection = _Connection() + client = _Client(self.PROJECT, connection=connection) dataset = _Dataset(client) - file_obj = TextModeFile() - table = self._make_one(self.TABLE_NAME, dataset=dataset) - with self.assertRaises(ValueError): - table.upload_from_file(file_obj, 'CSV', size=1234) + table = self._make_one(self.TABLE_NAME, dataset) - def test_upload_from_file_binary_mode_no_failure(self): - self._upload_from_file_helper(input_file_mode='r+b') + # Create mocks to be checked for doing transport. + resumable_url = 'http://test.invalid?upload_id=hey-you' + response_headers = {'location': resumable_url} + fake_transport = self._mock_transport( + http_client.OK, response_headers) + table._make_transport = mock.Mock( + return_value=fake_transport, spec=[]) + + # Create some mock arguments and call the method under test. + data = b'goodbye gudbi gootbee' + stream = io.BytesIO(data) + metadata = _get_upload_metadata( + 'CSV', table._schema, table._dataset, table.name) + upload, transport = table._initiate_resumable_upload( + client, stream, metadata, num_retries) + + # Check the returned values. 
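+        # (The assertions below pin down the ResumableUpload configuration:
+        # upload URL and headers, the default chunk size, the untouched
+        # stream, and the retry strategy derived from ``num_retries``.)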
+ self.assertIsInstance(upload, ResumableUpload) + upload_url = ( + 'https://www.googleapis.com/upload/bigquery/v2/projects/' + + self.PROJECT + + '/jobs?uploadType=resumable') + self.assertEqual(upload.upload_url, upload_url) + expected_headers = _get_upload_headers(connection.USER_AGENT) + self.assertEqual(upload._headers, expected_headers) + self.assertFalse(upload.finished) + self.assertEqual(upload._chunk_size, _DEFAULT_CHUNKSIZE) + self.assertIs(upload._stream, stream) + self.assertIsNone(upload._total_bytes) + self.assertEqual(upload._content_type, _GENERIC_CONTENT_TYPE) + self.assertEqual(upload.resumable_url, resumable_url) + + retry_strategy = upload._retry_strategy + self.assertEqual(retry_strategy.max_sleep, 64.0) + if num_retries is None: + self.assertEqual(retry_strategy.max_cumulative_retry, 600.0) + self.assertIsNone(retry_strategy.max_retries) + else: + self.assertIsNone(retry_strategy.max_cumulative_retry) + self.assertEqual(retry_strategy.max_retries, num_retries) + self.assertIs(transport, fake_transport) + # Make sure we never read from the stream. + self.assertEqual(stream.tell(), 0) + + # Check the mocks. + table._make_transport.assert_called_once_with(client) + request_headers = expected_headers.copy() + request_headers['x-upload-content-type'] = _GENERIC_CONTENT_TYPE + fake_transport.request.assert_called_once_with( + 'POST', + upload_url, + data=json.dumps(metadata).encode('utf-8'), + headers=request_headers, + ) - def test_upload_from_file_size_failure(self): - conn = _Connection() - client = _Client(project=self.PROJECT, connection=conn) - dataset = _Dataset(client) - file_obj = object() - table = self._make_one(self.TABLE_NAME, dataset=dataset) - with self.assertRaises(ValueError): - table.upload_from_file(file_obj, 'CSV', size=None) + def test__initiate_resumable_upload(self): + self._initiate_resumable_upload_helper() - def test_upload_from_file_multipart_w_400(self): - import csv - import datetime - from six.moves.http_client import BAD_REQUEST - from google.cloud._testing import _NamedTemporaryFile - from google.cloud._helpers import UTC - from google.cloud.exceptions import BadRequest + def test__initiate_resumable_upload_with_retry(self): + self._initiate_resumable_upload_helper(num_retries=11) - WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) - response = {'status': BAD_REQUEST} - conn = _Connection( - (response, b'{}'), - ) - client = _Client(project=self.PROJECT, connection=conn) + def _do_multipart_upload_success_helper( + self, get_boundary, num_retries=None): + from google.cloud.bigquery.table import _get_upload_headers + from google.cloud.bigquery.table import _get_upload_metadata + + connection = _Connection() + client = _Client(self.PROJECT, connection=connection) dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + table = self._make_one(self.TABLE_NAME, dataset) - with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as file_obj: - writer = csv.writer(file_obj) - writer.writerow(('full_name', 'age', 'joined')) - writer.writerow(('Phred Phlyntstone', 32, WHEN)) + # Create mocks to be checked for doing transport. + fake_transport = self._mock_transport(http_client.OK, {}) + table._make_transport = mock.Mock(return_value=fake_transport, spec=[]) + + # Create some mock arguments. 
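+        # (The payload bytes are deliberately not valid UTF-8: the
+        # multipart body must carry them verbatim under the generic
+        # ``*/*`` content type.)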
+ data = b'Bzzzz-zap \x00\x01\xf4' + stream = io.BytesIO(data) + metadata = _get_upload_metadata( + 'CSV', table._schema, table._dataset, table.name) + size = len(data) + response = table._do_multipart_upload( + client, stream, metadata, size, num_retries) + + # Check the mocks and the returned value. + self.assertIs(response, fake_transport.request.return_value) + self.assertEqual(stream.tell(), size) + table._make_transport.assert_called_once_with(client) + get_boundary.assert_called_once_with() + + upload_url = ( + 'https://www.googleapis.com/upload/bigquery/v2/projects/' + + self.PROJECT + + '/jobs?uploadType=multipart') + payload = ( + b'--==0==\r\n' + + b'content-type: application/json; charset=UTF-8\r\n\r\n' + + json.dumps(metadata).encode('utf-8') + b'\r\n' + + b'--==0==\r\n' + + b'content-type: */*\r\n\r\n' + + data + b'\r\n' + + b'--==0==--') + headers = _get_upload_headers(connection.USER_AGENT) + headers['content-type'] = b'multipart/related; boundary="==0=="' + fake_transport.request.assert_called_once_with( + 'POST', + upload_url, + data=payload, + headers=headers, + ) - with open(temp.name, 'rb') as file_obj: - with self.assertRaises(BadRequest): - table.upload_from_file( - file_obj, 'CSV', rewind=True) + @mock.patch(u'google.resumable_media._upload.get_boundary', + return_value=b'==0==') + def test__do_multipart_upload(self, get_boundary): + self._do_multipart_upload_success_helper(get_boundary) - def _upload_from_file_helper(self, **kw): - import csv - import datetime - from six.moves.http_client import OK - from google.cloud._helpers import UTC - from google.cloud._testing import _NamedTemporaryFile - from google.cloud.bigquery.table import SchemaField + @mock.patch(u'google.resumable_media._upload.get_boundary', + return_value=b'==0==') + def test__do_multipart_upload_with_retry(self, get_boundary): + self._do_multipart_upload_success_helper(get_boundary, num_retries=8) - WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) - PATH = 'projects/%s/jobs' % (self.PROJECT,) - response = {'status': OK} - conn = _Connection( - (response, b'{}'), - ) - client = _Client(project=self.PROJECT, connection=conn) - expected_job = object() - if 'client' in kw: - kw['client']._job = expected_job - else: - client._job = expected_job - input_file_mode = kw.pop('input_file_mode', 'rb') - dataset = _Dataset(client) - full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') - age = SchemaField('age', 'INTEGER', mode='REQUIRED') - joined = SchemaField('joined', 'TIMESTAMP', mode='NULLABLE') - table = self._make_one(self.TABLE_NAME, dataset=dataset, - schema=[full_name, age, joined]) - ROWS = [ - ('Phred Phlyntstone', 32, WHEN), - ('Bharney Rhubble', 33, WHEN + datetime.timedelta(seconds=1)), - ('Wylma Phlyntstone', 29, WHEN + datetime.timedelta(seconds=2)), - ('Bhettye Rhubble', 27, None), - ] - with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as file_obj: - writer = csv.writer(file_obj) - writer.writerow(('full_name', 'age', 'joined')) - writer.writerows(ROWS) - - with open(temp.name, input_file_mode) as file_obj: - BODY = file_obj.read() - explicit_size = kw.pop('_explicit_size', False) - if explicit_size: - kw['size'] = len(BODY) - job = table.upload_from_file( - file_obj, 'CSV', rewind=True, **kw) - - self.assertIs(job, expected_job) - return conn.http._requested, PATH, BODY - - def test_upload_from_file_w_bound_client_multipart(self): - import json - from six.moves.urllib.parse import parse_qsl - from 
six.moves.urllib.parse import urlsplit - from google.cloud._helpers import _to_bytes - - requested, PATH, BODY = self._upload_from_file_helper() - parse_chunk = _email_chunk_parser() - - self.assertEqual(len(requested), 1) - req = requested[0] - self.assertEqual(req['method'], 'POST') - uri = req['uri'] - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual(scheme, 'http') - self.assertEqual(netloc, 'example.com') - self.assertEqual(path, '/%s' % PATH) - self.assertEqual(dict(parse_qsl(qs)), - {'uploadType': 'multipart'}) - - ctype, boundary = [x.strip() - for x in req['headers']['content-type'].split(';')] - self.assertEqual(ctype, 'multipart/related') - self.assertTrue(boundary.startswith('boundary="==')) - self.assertTrue(boundary.endswith('=="')) - - divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) - chunks = req['body'].split(divider)[1:-1] # discard prolog / epilog - self.assertEqual(len(chunks), 2) - - text_msg = parse_chunk(chunks[0].strip()) - self.assertEqual(dict(text_msg._headers), - {'Content-Type': 'application/json', - 'MIME-Version': '1.0'}) - metadata = json.loads(text_msg._payload) - load_config = metadata['configuration']['load'] - DESTINATION_TABLE = { - 'projectId': self.PROJECT, - 'datasetId': self.DS_NAME, - 'tableId': self.TABLE_NAME, - } - self.assertEqual(load_config['destinationTable'], DESTINATION_TABLE) - self.assertEqual(load_config['sourceFormat'], 'CSV') - - app_msg = parse_chunk(chunks[1].strip()) - self.assertEqual(dict(app_msg._headers), - {'Content-Type': 'application/octet-stream', - 'Content-Transfer-Encoding': 'binary', - 'MIME-Version': '1.0'}) - body = BODY.decode('ascii').rstrip() - body_lines = [line.strip() for line in body.splitlines()] - payload_lines = app_msg._payload.rstrip().splitlines() - self.assertEqual(payload_lines, body_lines) - - def test_upload_from_file_resumable_with_400(self): - import csv - import datetime - import mock - from six.moves.http_client import BAD_REQUEST - from google.cloud.exceptions import BadRequest - from google.cloud._helpers import UTC - from google.cloud._testing import _NamedTemporaryFile +class TestTableUpload(object): + # NOTE: This is a "partner" to `TestTable` meant to test some of the + # "upload" portions of `Table`. It also uses `pytest`-style tests + # rather than `unittest`-style. 
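+    # No real HTTP traffic occurs in these tests: they patch either the
+    # low-level ``_do_*_upload`` helpers or the transport returned by
+    # ``_make_transport``.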
- WHEN_TS = 1437767599.006 - WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace( - tzinfo=UTC) - initial_response = {'status': BAD_REQUEST} - conn = _Connection( - (initial_response, b'{}'), - ) - client = _Client(project=self.PROJECT, connection=conn) + @staticmethod + def _make_table(): + from google.cloud.bigquery import _http + from google.cloud.bigquery import client + from google.cloud.bigquery import dataset + from google.cloud.bigquery import table - class _UploadConfig(object): - accept = ['*/*'] - max_size = None - resumable_multipart = True - resumable_path = u'/upload/bigquery/v2/projects/{project}/jobs' - simple_multipart = True - simple_path = u'' # force resumable - dataset = _Dataset(client) - table = self._make_one(self.TABLE_NAME, dataset=dataset) + connection = mock.create_autospec(_http.Connection, instance=True) + client = mock.create_autospec(client.Client, instance=True) + client._connection = connection + client._credentials = mock.sentinel.credentials + client.project = 'project_id' - with mock.patch('google.cloud.bigquery.table._UploadConfig', - new=_UploadConfig): - with _NamedTemporaryFile() as temp: - with open(temp.name, 'w') as file_obj: - writer = csv.writer(file_obj) - writer.writerow(('full_name', 'age', 'joined')) - writer.writerow(('Phred Phlyntstone', 32, WHEN)) - - with open(temp.name, 'rb') as file_obj: - with self.assertRaises(BadRequest): - table.upload_from_file( - file_obj, 'CSV', rewind=True) - - # pylint: disable=too-many-statements - def test_upload_from_file_w_explicit_client_resumable(self): - import json - import mock - from six.moves.http_client import OK - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - - UPLOAD_PATH = 'https://example.com/upload/test' - initial_response = {'status': OK, 'location': UPLOAD_PATH} - upload_response = {'status': OK} - conn = _Connection( - (initial_response, b'{}'), - (upload_response, b'{}'), - ) - client = _Client(project=self.PROJECT, connection=conn) + dataset = dataset.Dataset('test_dataset', client) + table = table.Table('test_table', dataset) - class _UploadConfig(object): - accept = ['*/*'] - max_size = None - resumable_multipart = True - resumable_path = u'/upload/bigquery/v2/projects/{project}/jobs' - simple_multipart = True - simple_path = u'' # force resumable - - with mock.patch('google.cloud.bigquery.table._UploadConfig', - new=_UploadConfig): - orig_requested, PATH, BODY = self._upload_from_file_helper( - allow_jagged_rows=False, - allow_quoted_newlines=False, - create_disposition='CREATE_IF_NEEDED', - encoding='utf8', - field_delimiter=',', - ignore_unknown_values=False, - max_bad_records=0, - quote_character='"', - skip_leading_rows=1, - write_disposition='WRITE_APPEND', - client=client, - _explicit_size=True) - - self.assertEqual(len(orig_requested), 0) - - requested = conn.http._requested - self.assertEqual(len(requested), 2) - req = requested[0] - self.assertEqual(req['method'], 'POST') - uri = req['uri'] - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual(scheme, 'http') - self.assertEqual(netloc, 'example.com') - self.assertEqual(path, '/%s' % PATH) - self.assertEqual(dict(parse_qsl(qs)), - {'uploadType': 'resumable'}) - - self.assertEqual(req['headers']['content-type'], 'application/json') - metadata = json.loads(req['body']) - load_config = metadata['configuration']['load'] - DESTINATION_TABLE = { - 'projectId': self.PROJECT, - 'datasetId': self.DS_NAME, - 'tableId': self.TABLE_NAME, + return table + + @staticmethod + 
def _make_response(status_code, content='', headers={}): + """Make a mock HTTP response.""" + import requests + response = mock.create_autospec(requests.Response, instance=True) + response.content = content.encode('utf-8') + response.headers = headers + response.status_code = status_code + return response + + @classmethod + def _make_do_upload_patch(cls, table, method, side_effect=None): + """Patches the low-level upload helpers.""" + if side_effect is None: + side_effect = [cls._make_response( + http_client.OK, + json.dumps({}), + {'Content-Type': 'application/json'})] + return mock.patch.object( + table, method, side_effect=side_effect, autospec=True) + + EXPECTED_CONFIGURATION = { + 'configuration': { + 'load': { + 'sourceFormat': 'CSV', + 'destinationTable': { + 'projectId': 'project_id', + 'datasetId': 'test_dataset', + 'tableId': 'test_table' + } + } + } + } + + @staticmethod + def _make_file_obj(): + return io.BytesIO(b'hello, is it me you\'re looking for?') + + # High-level tests + + def test_upload_from_file_resumable(self): + import google.cloud.bigquery.table + + table = self._make_table() + file_obj = self._make_file_obj() + + do_upload_patch = self._make_do_upload_patch( + table, '_do_resumable_upload') + with do_upload_patch as do_upload: + table.upload_from_file(file_obj, source_format='CSV') + + do_upload.assert_called_once_with( + table._dataset._client, + file_obj, + self.EXPECTED_CONFIGURATION, + google.cloud.bigquery.table._DEFAULT_NUM_RETRIES) + + def test_upload_file_resumable_metadata(self): + table = self._make_table() + file_obj = self._make_file_obj() + + config_args = { + 'source_format': 'CSV', + 'allow_jagged_rows': False, + 'allow_quoted_newlines': False, + 'create_disposition': 'CREATE_IF_NEEDED', + 'encoding': 'utf8', + 'field_delimiter': ',', + 'ignore_unknown_values': False, + 'max_bad_records': 0, + 'quote_character': '"', + 'skip_leading_rows': 1, + 'write_disposition': 'WRITE_APPEND', + 'job_name': 'oddjob' } - self.assertEqual(load_config['destinationTable'], DESTINATION_TABLE) - self.assertEqual(load_config['sourceFormat'], 'CSV') - self.assertEqual(load_config['allowJaggedRows'], False) - self.assertEqual(load_config['allowQuotedNewlines'], False) - self.assertEqual(load_config['createDisposition'], 'CREATE_IF_NEEDED') - self.assertEqual(load_config['encoding'], 'utf8') - self.assertEqual(load_config['fieldDelimiter'], ',') - self.assertEqual(load_config['ignoreUnknownValues'], False) - self.assertEqual(load_config['maxBadRecords'], 0) - self.assertEqual(load_config['quote'], '"') - self.assertEqual(load_config['skipLeadingRows'], 1) - self.assertEqual(load_config['writeDisposition'], 'WRITE_APPEND') - - req = requested[1] - self.assertEqual(req['method'], 'PUT') - self.assertEqual(req['uri'], UPLOAD_PATH) - headers = req['headers'] - length = len(BODY) - self.assertEqual(headers['Content-Type'], 'application/octet-stream') - self.assertEqual(headers['Content-Range'], - 'bytes 0-%d/%d' % (length - 1, length)) - self.assertEqual(headers['content-length'], '%d' % (length,)) - self.assertEqual(req['body'], BODY) - # pylint: enable=too-many-statements - - def test_upload_from_file_w_jobid(self): - import json - from google.cloud._helpers import _to_bytes - - requested, PATH, BODY = self._upload_from_file_helper(job_name='foo') - parse_chunk = _email_chunk_parser() - req = requested[0] - ctype, boundary = [x.strip() - for x in req['headers']['content-type'].split(';')] - divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) - chunks = 
req['body'].split(divider)[1:-1] # discard prolog / epilog - text_msg = parse_chunk(chunks[0].strip()) - metadata = json.loads(text_msg._payload) - load_config = metadata['configuration']['load'] - self.assertEqual(load_config['jobReference'], {'jobId': 'foo'}) + + expected_config = { + 'configuration': { + 'load': { + 'sourceFormat': config_args['source_format'], + 'destinationTable': { + 'projectId': table._dataset._client.project, + 'datasetId': table.dataset_name, + 'tableId': table.name + }, + 'allowJaggedRows': config_args['allow_jagged_rows'], + 'allowQuotedNewlines': + config_args['allow_quoted_newlines'], + 'createDisposition': config_args['create_disposition'], + 'encoding': config_args['encoding'], + 'fieldDelimiter': config_args['field_delimiter'], + 'ignoreUnknownValues': + config_args['ignore_unknown_values'], + 'maxBadRecords': config_args['max_bad_records'], + 'quote': config_args['quote_character'], + 'skipLeadingRows': config_args['skip_leading_rows'], + 'writeDisposition': config_args['write_disposition'], + 'jobReference': {'jobId': config_args['job_name']} + } + } + } + + do_upload_patch = self._make_do_upload_patch( + table, '_do_resumable_upload') + with do_upload_patch as do_upload: + table.upload_from_file( + file_obj, **config_args) + + do_upload.assert_called_once_with( + table._dataset._client, + file_obj, + expected_config, + mock.ANY) + + def test_upload_from_file_multipart(self): + import google.cloud.bigquery.table + + table = self._make_table() + file_obj = self._make_file_obj() + file_obj_size = 10 + + do_upload_patch = self._make_do_upload_patch( + table, '_do_multipart_upload') + with do_upload_patch as do_upload: + table.upload_from_file( + file_obj, source_format='CSV', size=file_obj_size) + + do_upload.assert_called_once_with( + table._dataset._client, + file_obj, + self.EXPECTED_CONFIGURATION, + file_obj_size, + google.cloud.bigquery.table._DEFAULT_NUM_RETRIES) + + def test_upload_from_file_with_retries(self): + table = self._make_table() + file_obj = self._make_file_obj() + num_retries = 20 + + do_upload_patch = self._make_do_upload_patch( + table, '_do_resumable_upload') + with do_upload_patch as do_upload: + table.upload_from_file( + file_obj, source_format='CSV', num_retries=num_retries) + + do_upload.assert_called_once_with( + table._dataset._client, + file_obj, + self.EXPECTED_CONFIGURATION, + num_retries) + + def test_upload_from_file_with_rewind(self): + table = self._make_table() + file_obj = self._make_file_obj() + file_obj.seek(2) + + with self._make_do_upload_patch(table, '_do_resumable_upload'): + table.upload_from_file( + file_obj, source_format='CSV', rewind=True) + + assert file_obj.tell() == 0 + + def test_upload_from_file_failure(self): + from google.resumable_media import InvalidResponse + from google.cloud import exceptions + + table = self._make_table() + file_obj = self._make_file_obj() + + response = self._make_response( + content='Someone is already in this spot.', + status_code=http_client.CONFLICT) + + do_upload_patch = self._make_do_upload_patch( + table, '_do_resumable_upload', + side_effect=InvalidResponse(response)) + + with do_upload_patch, pytest.raises(exceptions.Conflict) as exc_info: + table.upload_from_file( + file_obj, source_format='CSV', rewind=True) + + assert exc_info.value.message == response.content.decode('utf-8') + assert exc_info.value.errors == [] + + def test_upload_from_file_bad_mode(self): + table = self._make_table() + file_obj = mock.Mock(spec=['mode']) + file_obj.mode = 'x' + + with 
pytest.raises(ValueError):
+ table.upload_from_file(
+ file_obj, source_format='CSV',)
+
+ # Low-level tests
+
+ @classmethod
+ def _make_resumable_upload_responses(cls, size):
+ """Make a series of responses for a successful resumable upload."""
+ from google import resumable_media
+
+ resumable_url = 'http://test.invalid?upload_id=and-then-there-was-1'
+ initial_response = cls._make_response(
+ http_client.OK, '', {'location': resumable_url})
+ data_response = cls._make_response(
+ resumable_media.PERMANENT_REDIRECT,
+ '', {'range': 'bytes=0-{:d}'.format(size - 1)})
+ final_response = cls._make_response(
+ http_client.OK,
+ json.dumps({'size': size}),
+ {'Content-Type': 'application/json'})
+ return [initial_response, data_response, final_response]
+
+ @staticmethod
+ def _make_transport_patch(table, responses=None):
+ """Patch a table's _make_transport method to return given responses."""
+ import google.auth.transport.requests
+
+ transport = mock.create_autospec(
+ google.auth.transport.requests.AuthorizedSession, instance=True)
+ transport.request.side_effect = responses
+ return mock.patch.object(
+ table, '_make_transport', return_value=transport, autospec=True)
+
+ def test__do_resumable_upload(self):
+ table = self._make_table()
+ file_obj = self._make_file_obj()
+ file_obj_len = len(file_obj.getvalue())
+ responses = self._make_resumable_upload_responses(file_obj_len)
+
+ with self._make_transport_patch(table, responses) as transport:
+ result = table._do_resumable_upload(
+ table._dataset._client,
+ file_obj,
+ self.EXPECTED_CONFIGURATION,
+ None)
+
+ content = result.content.decode('utf-8')
+ assert json.loads(content) == {'size': file_obj_len}
+
+ # Verify that configuration data was passed in with the initial
+ # request.
+ transport.return_value.request.assert_any_call(
+ 'POST',
+ mock.ANY,
+ data=json.dumps(self.EXPECTED_CONFIGURATION).encode('utf-8'),
+ headers=mock.ANY)
+
+ def test__do_multipart_upload(self):
+ table = self._make_table()
+ file_obj = self._make_file_obj()
+ file_obj_len = len(file_obj.getvalue())
+ responses = [self._make_response(http_client.OK)]
+
+ with self._make_transport_patch(table, responses) as transport:
+ table._do_multipart_upload(
+ table._dataset._client,
+ file_obj,
+ self.EXPECTED_CONFIGURATION,
+ file_obj_len,
+ None)
+
+ # Verify that configuration data was passed in with the initial
+ # request.
+ request_args = transport.return_value.request.mock_calls[0][2]
+ request_data = request_args['data'].decode('utf-8')
+ request_headers = request_args['headers']
+
+ request_content = email.message_from_string(
+ 'Content-Type: {}\r\n{}'.format(
+ request_headers['content-type'].decode('utf-8'),
+ request_data))
+
+ # There should be two payloads: the configuration and the binary data.
+ configuration_data = request_content.get_payload(0).get_payload() + binary_data = request_content.get_payload(1).get_payload() + + assert json.loads(configuration_data) == self.EXPECTED_CONFIGURATION + assert binary_data.encode('utf-8') == file_obj.getvalue() + + def test__do_multipart_upload_wrong_size(self): + table = self._make_table() + file_obj = self._make_file_obj() + file_obj_len = len(file_obj.getvalue()) + + with pytest.raises(ValueError): + table._do_multipart_upload( + table._dataset._client, + file_obj, + {}, + file_obj_len+1, + None) class Test_parse_schema_resource(unittest.TestCase, _SchemaBase): @@ -1974,6 +2146,70 @@ def test_w_subfields(self): 'mode': 'REQUIRED'}]}) +class Test__get_upload_metadata(unittest.TestCase): + + @staticmethod + def _call_fut(source_format, schema, dataset, name): + from google.cloud.bigquery.table import _get_upload_metadata + + return _get_upload_metadata(source_format, schema, dataset, name) + + def test_empty_schema(self): + source_format = 'AVRO' + dataset = mock.Mock(project='prediction', spec=['name', 'project']) + dataset.name = 'market' # mock.Mock() treats `name` specially. + table_name = 'chairs' + metadata = self._call_fut(source_format, [], dataset, table_name) + + expected = { + 'configuration': { + 'load': { + 'sourceFormat': source_format, + 'destinationTable': { + 'projectId': dataset.project, + 'datasetId': dataset.name, + 'tableId': table_name, + }, + }, + }, + } + self.assertEqual(metadata, expected) + + def test_with_schema(self): + from google.cloud.bigquery.table import SchemaField + + source_format = 'CSV' + full_name = SchemaField('full_name', 'STRING', mode='REQUIRED') + dataset = mock.Mock(project='blind', spec=['name', 'project']) + dataset.name = 'movie' # mock.Mock() treats `name` specially. + table_name = 'teebull-neem' + metadata = self._call_fut( + source_format, [full_name], dataset, table_name) + + expected = { + 'configuration': { + 'load': { + 'sourceFormat': source_format, + 'destinationTable': { + 'projectId': dataset.project, + 'datasetId': dataset.name, + 'tableId': table_name, + }, + 'schema': { + 'fields': [ + { + 'name': full_name.name, + 'type': full_name.field_type, + 'mode': full_name.mode, + }, + ], + }, + }, + }, + } + self.assertEqual(metadata, expected) + + class _Client(object): _query_results = () @@ -1982,9 +2218,6 @@ def __init__(self, project='project', connection=None): self.project = project self._connection = connection - def job_from_resource(self, resource): # pylint: disable=unused-argument - return self._job - def run_sync_query(self, query): return _Query(query, self) @@ -2016,37 +2249,14 @@ def project(self): return self._client.project -class _Responder(object): - - def __init__(self, *responses): - self._responses = responses[:] - self._requested = [] - - def _respond(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response - - -class _HTTP(_Responder): - - connections = {} # For google-apitools debugging. 
- - def request(self, uri, method, headers, body, **kw): - if hasattr(body, 'read'): - body = body.read() - return self._respond(uri=uri, method=method, headers=headers, - body=body, **kw) - - -class _Connection(_Responder): +class _Connection(object): API_BASE_URL = 'http://example.com' USER_AGENT = 'testing 1.2.3' def __init__(self, *responses): - super(_Connection, self).__init__(*responses) - self.http = _HTTP(*responses) + self._responses = responses[:] + self._requested = [] def api_request(self, **kw): from google.cloud.exceptions import NotFound @@ -2059,29 +2269,3 @@ def api_request(self, **kw): raise NotFound('miss') else: return response - - def build_api_url(self, path, query_params=None, - api_base_url=API_BASE_URL): - from six.moves.urllib.parse import urlencode - from six.moves.urllib.parse import urlsplit - from six.moves.urllib.parse import urlunsplit - - # Mimic the build_api_url interface. - qs = urlencode(query_params or {}) - scheme, netloc, _, _, _ = urlsplit(api_base_url) - return urlunsplit((scheme, netloc, path, qs, '')) - - -def _email_chunk_parser(): - import six - - if six.PY3: # pragma: NO COVER Python3 - from email.parser import BytesParser - - parser = BytesParser() - return parser.parsebytes - else: - from email.parser import Parser - - parser = Parser() - return parser.parsestr diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py index 7d967a3e4901..d03d1364cf40 100644 --- a/storage/google/cloud/storage/blob.py +++ b/storage/google/cloud/storage/blob.py @@ -368,6 +368,7 @@ def _make_transport(self, client): :type client: :class:`~google.cloud.storage.client.Client` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. + :rtype transport: :class:`~google.auth.transport.requests.AuthorizedSession` :returns: The transport (with credentials) that will diff --git a/storage/setup.py b/storage/setup.py index d18624f3c13d..8d11055fac77 100644 --- a/storage/setup.py +++ b/storage/setup.py @@ -53,7 +53,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.25.0, < 0.26dev', 'google-auth >= 1.0.0', - 'google-resumable-media >= 0.1.1', + 'google-resumable-media >= 0.2.1', 'requests >= 2.0.0', ] diff --git a/storage/tests/unit/test_blob.py b/storage/tests/unit/test_blob.py index 250a05bd28f4..e2227adbd94a 100644 --- a/storage/tests/unit/test_blob.py +++ b/storage/tests/unit/test_blob.py @@ -775,7 +775,7 @@ def _do_multipart_success(self, mock_get_boundary, size=None, blob._make_transport = mock.Mock(return_value=fake_transport, spec=[]) # Create some mock arguments. - client = mock.sentinel.mock + client = mock.sentinel.client data = b'data here hear hier' stream = io.BytesIO(data) content_type = u'application/xml' @@ -865,7 +865,7 @@ def _initiate_resumable_helper(self, size=None, extra_headers=None, blob._make_transport = mock.Mock(return_value=fake_transport, spec=[]) # Create some mock arguments and call the method under test. - client = mock.sentinel.mock + client = mock.sentinel.client data = b'hello hallo halo hi-low' stream = io.BytesIO(data) content_type = u'text/plain' @@ -1033,7 +1033,7 @@ def _do_resumable_helper(self, use_size=False, num_retries=None): blob._make_transport = mock.Mock(return_value=fake_transport, spec=[]) # Create some mock arguments and call the method under test. 
- client = mock.sentinel.mock + client = mock.sentinel.client stream = io.BytesIO(data) content_type = u'text/html' response = blob._do_resumable_upload( @@ -1271,7 +1271,7 @@ def _create_resumable_upload_session_helper(self, origin=None, # Create some mock arguments and call the method under test. content_type = u'text/plain' size = 10000 - client = mock.sentinel.mock + client = mock.sentinel.client new_url = blob.create_resumable_upload_session( content_type=content_type, size=size, origin=origin, client=client) From 9898a875125c97a767250e1ccde5f458e3fbd6d2 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 21 Jul 2017 14:50:26 -0700 Subject: [PATCH 41/62] Translate GA (#3650) --- README.rst | 2 +- translate/google/cloud/translate.py | 32 +++++++++++++++++++ .../{translate => translate_v2}/__init__.py | 9 +++--- .../{translate => translate_v2}/_http.py | 2 +- .../{translate => translate_v2}/client.py | 6 ++-- translate/setup.py | 4 +-- translate/tests/system.py | 2 +- translate/tests/unit/test__http.py | 4 +-- translate/tests/unit/test_client.py | 11 +++---- 9 files changed, 52 insertions(+), 20 deletions(-) create mode 100644 translate/google/cloud/translate.py rename translate/google/cloud/{translate => translate_v2}/__init__.py (79%) rename translate/google/cloud/{translate => translate_v2}/_http.py (96%) rename translate/google/cloud/{translate => translate_v2}/client.py (98%) diff --git a/README.rst b/README.rst index 9b3d9f0db64e..3de445aba762 100644 --- a/README.rst +++ b/README.rst @@ -20,6 +20,7 @@ The following client libraries have **GA** support: - `Google Cloud Datastore`_ (`Datastore README`_) - `Stackdriver Logging`_ (`Logging README`_) - `Google Cloud Storage`_ (`Storage README`_) +- `Google Cloud Translation`_ (`Translation README`_) **GA** (general availability) indicates that the client library for a particular service is stable, and that the code surface will not change in @@ -33,7 +34,6 @@ The following client libraries have **beta** support: - `Google BigQuery`_ (`BigQuery README`_) - `Google Cloud Vision`_ (`Vision README`_) - `Google Cloud Natural Language`_ (`Natural Language README`_) -- `Google Cloud Translation`_ (`Translation README`_) - `Google Cloud Video Intelligence`_ (`Video Intelligence README`_) **Beta** indicates that the client library for a particular service is diff --git a/translate/google/cloud/translate.py b/translate/google/cloud/translate.py new file mode 100644 index 000000000000..9a24ceebcd10 --- /dev/null +++ b/translate/google/cloud/translate.py @@ -0,0 +1,32 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Translation API wrapper.""" + + +from google.cloud.translate_v2 import __version__ +from google.cloud.translate_v2.client import Client + +# These constants are essentially deprecated; strings should be used instead. +# They are imported here for backwards compatibility. 
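+# For example (illustrative only, not part of this module): existing code
+# such as ``client.translate(values, target_language='de', model=translate.NMT)``
+# keeps working, while new code should pass the string directly:
+# ``client.translate(values, target_language='de', model='nmt')``.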
+from google.cloud.translate_v2.client import BASE +from google.cloud.translate_v2.client import NMT + + +__all__ = ( + '__version__', + 'BASE', + 'Client', + 'NMT', +) diff --git a/translate/google/cloud/translate/__init__.py b/translate/google/cloud/translate_v2/__init__.py similarity index 79% rename from translate/google/cloud/translate/__init__.py rename to translate/google/cloud/translate_v2/__init__.py index bf20faa86bdf..11b762101cf7 100644 --- a/translate/google/cloud/translate/__init__.py +++ b/translate/google/cloud/translate_v2/__init__.py @@ -18,9 +18,10 @@ from pkg_resources import get_distribution __version__ = get_distribution('google-cloud-translate').version -from google.cloud.translate.client import BASE -from google.cloud.translate.client import Client -from google.cloud.translate.client import NMT +from google.cloud.translate_v2.client import Client -__all__ = ['__version__', 'BASE', 'Client', 'NMT'] +__all__ = ( + '__version__', + 'Client', +) diff --git a/translate/google/cloud/translate/_http.py b/translate/google/cloud/translate_v2/_http.py similarity index 96% rename from translate/google/cloud/translate/_http.py rename to translate/google/cloud/translate_v2/_http.py index 0c404f2a4a3b..dedb17ec9e14 100644 --- a/translate/google/cloud/translate/_http.py +++ b/translate/google/cloud/translate_v2/_http.py @@ -16,7 +16,7 @@ from google.cloud import _http -from google.cloud.translate import __version__ +from google.cloud.translate_v2 import __version__ _CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) diff --git a/translate/google/cloud/translate/client.py b/translate/google/cloud/translate_v2/client.py similarity index 98% rename from translate/google/cloud/translate/client.py rename to translate/google/cloud/translate_v2/client.py index 9acd7d65cc47..d72993f0fffd 100644 --- a/translate/google/cloud/translate/client.py +++ b/translate/google/cloud/translate_v2/client.py @@ -20,7 +20,7 @@ from google.cloud._helpers import _to_bytes from google.cloud.client import Client as BaseClient -from google.cloud.translate._http import Connection +from google.cloud.translate_v2._http import Connection ENGLISH_ISO_639 = 'en' @@ -189,8 +189,8 @@ def translate(self, values, target_language=None, format_=None, in the query. :type model: str - :param model: (Optional) The model used to translate the text. The - only accepted values are :attr:`BASE` and :attr:`NMT`. + :param model: (Optional) The model used to translate the text, such + as ``'base'`` or ``'nmt'``. :rtype: str or list :returns: A list of dictionaries for each queried value. 
Each diff --git a/translate/setup.py b/translate/setup.py index edfaf5cbdc96..12934c6b4e96 100644 --- a/translate/setup.py +++ b/translate/setup.py @@ -35,7 +35,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -56,7 +56,7 @@ setup( name='google-cloud-translate', - version='0.25.0', + version='1.0.0', description='Python Client for Google Cloud Translation API', long_description=README, namespace_packages=[ diff --git a/translate/tests/system.py b/translate/tests/system.py index e4b971e238f0..7403ed3c0510 100644 --- a/translate/tests/system.py +++ b/translate/tests/system.py @@ -56,7 +56,7 @@ def test_translate(self): values = ['hvala ti', 'dankon', 'Me llamo Jeff', 'My name is Jeff'] translations = Config.CLIENT.translate( - values, target_language='de', model=translate.NMT) + values, target_language='de', model='nmt') self.assertEqual(len(values), len(translations)) self.assertEqual( diff --git a/translate/tests/unit/test__http.py b/translate/tests/unit/test__http.py index 1d7f7b4c6c18..2dc6b015d6de 100644 --- a/translate/tests/unit/test__http.py +++ b/translate/tests/unit/test__http.py @@ -21,7 +21,7 @@ class TestConnection(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.translate._http import Connection + from google.cloud.translate_v2._http import Connection return Connection @@ -57,7 +57,7 @@ def test_build_api_url_w_extra_query_params(self): def test_extra_headers(self): from google.cloud import _http as base_http - from google.cloud.translate import _http as MUT + from google.cloud.translate_v2 import _http as MUT http = mock.Mock(spec=['request']) response = mock.Mock(status=200, spec=['status']) diff --git a/translate/tests/unit/test_client.py b/translate/tests/unit/test_client.py index d2c26cec96c4..18c19c436e45 100644 --- a/translate/tests/unit/test_client.py +++ b/translate/tests/unit/test_client.py @@ -19,16 +19,15 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.translate.client import Client - + from google.cloud.translate import Client return Client def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_constructor(self): - from google.cloud.translate._http import Connection - from google.cloud.translate.client import ENGLISH_ISO_639 + from google.cloud.translate_v2._http import Connection + from google.cloud.translate_v2.client import ENGLISH_ISO_639 http = object() client = self._make_one(_http=http) @@ -38,7 +37,7 @@ def test_constructor(self): self.assertEqual(client.target_language, ENGLISH_ISO_639) def test_constructor_non_default(self): - from google.cloud.translate._http import Connection + from google.cloud.translate_v2._http import Connection http = object() target = 'es' @@ -49,7 +48,7 @@ def test_constructor_non_default(self): self.assertEqual(client.target_language, target) def test_get_languages(self): - from google.cloud.translate.client import ENGLISH_ISO_639 + from google.cloud.translate_v2.client import ENGLISH_ISO_639 client = self._make_one(_http=object()) supported = [ From be5a16e3428889cfebec48c5c31278e30cc66c5e Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 21 Jul 2017 15:42:38 -0700 Subject: [PATCH 42/62] Add Future interface to BigQuery jobs (#3626) * Add future interface to bigquery Jobs. 
* Make QueryJob return QueryResults from result() * Deprecate QueryJob.results() --- bigquery/google/cloud/bigquery/job.py | 201 +++++++++++++++++++++++--- bigquery/tests/system.py | 10 ++ bigquery/tests/unit/test_job.py | 102 ++++++++++++- 3 files changed, 291 insertions(+), 22 deletions(-) diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index 4f791bdbea0c..35a423b755b9 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -14,8 +14,14 @@ """Define API Jobs.""" +import collections +import threading +import warnings + import six +from six.moves import http_client +from google.cloud import exceptions from google.cloud.exceptions import NotFound from google.cloud._helpers import _datetime_from_microseconds from google.cloud.bigquery.dataset import Dataset @@ -27,6 +33,60 @@ from google.cloud.bigquery._helpers import UDFResourcesProperty from google.cloud.bigquery._helpers import _EnumProperty from google.cloud.bigquery._helpers import _TypedProperty +import google.cloud.future.base + +_DONE_STATE = 'DONE' +_STOPPED_REASON = 'stopped' + +_ERROR_REASON_TO_EXCEPTION = { + 'accessDenied': http_client.FORBIDDEN, + 'backendError': http_client.INTERNAL_SERVER_ERROR, + 'billingNotEnabled': http_client.FORBIDDEN, + 'billingTierLimitExceeded': http_client.BAD_REQUEST, + 'blocked': http_client.FORBIDDEN, + 'duplicate': http_client.CONFLICT, + 'internalError': http_client.INTERNAL_SERVER_ERROR, + 'invalid': http_client.BAD_REQUEST, + 'invalidQuery': http_client.BAD_REQUEST, + 'notFound': http_client.NOT_FOUND, + 'notImplemented': http_client.NOT_IMPLEMENTED, + 'quotaExceeded': http_client.FORBIDDEN, + 'rateLimitExceeded': http_client.FORBIDDEN, + 'resourceInUse': http_client.BAD_REQUEST, + 'resourcesExceeded': http_client.BAD_REQUEST, + 'responseTooLarge': http_client.FORBIDDEN, + 'stopped': http_client.OK, + 'tableUnavailable': http_client.BAD_REQUEST, +} + +_FakeResponse = collections.namedtuple('_FakeResponse', ['status']) + + +def _error_result_to_exception(error_result): + """Maps BigQuery error reasons to an exception. + + The reasons and their matching HTTP status codes are documented on + the `troubleshooting errors`_ page. + + .. _troubleshooting errors: https://cloud.google.com/bigquery\ + /troubleshooting-errors + + :type error_result: Mapping[str, str] + :param error_result: The error result from BigQuery. + + :rtype google.cloud.exceptions.GoogleCloudError: + :returns: The mapped exception. + """ + reason = error_result.get('reason') + status_code = _ERROR_REASON_TO_EXCEPTION.get( + reason, http_client.INTERNAL_SERVER_ERROR) + # make_exception expects an httplib2 response object. + fake_response = _FakeResponse(status=status_code) + return exceptions.make_exception( + fake_response, + error_result.get('message', ''), + error_info=error_result, + use_json=False) class Compression(_EnumProperty): @@ -82,16 +142,23 @@ class WriteDisposition(_EnumProperty): ALLOWED = (WRITE_APPEND, WRITE_TRUNCATE, WRITE_EMPTY) -class _BaseJob(object): - """Base class for jobs. +class _AsyncJob(google.cloud.future.base.PollingFuture): + """Base class for asynchronous jobs. + + :type name: str + :param name: the name of the job :type client: :class:`google.cloud.bigquery.client.Client` :param client: A client which holds credentials and project configuration for the dataset (which requires a project). 
""" - def __init__(self, client): + def __init__(self, name, client): + super(_AsyncJob, self).__init__() + self.name = name self._client = client self._properties = {} + self._result_set = False + self._completion_lock = threading.Lock() @property def project(self): @@ -117,21 +184,6 @@ def _require_client(self, client): client = self._client return client - -class _AsyncJob(_BaseJob): - """Base class for asynchronous jobs. - - :type name: str - :param name: the name of the job - - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: A client which holds credentials and project configuration - for the dataset (which requires a project). - """ - def __init__(self, name, client): - super(_AsyncJob, self).__init__(client) - self.name = name - @property def job_type(self): """Type of job @@ -273,6 +325,9 @@ def _set_properties(self, api_response): self._properties.clear() self._properties.update(cleaned) + # For Future interface + self._set_future_result() + @classmethod def _get_resource_config(cls, resource): """Helper for :meth:`from_api_repr` @@ -345,7 +400,7 @@ def exists(self, client=None): return True def reload(self, client=None): - """API call: refresh job properties via a GET request + """API call: refresh job properties via a GET request. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get @@ -371,12 +426,85 @@ def cancel(self, client=None): ``NoneType`` :param client: the client to use. If not passed, falls back to the ``client`` stored on the current dataset. + + :rtype: bool + :returns: Boolean indicating that the cancel request was sent. """ client = self._require_client(client) api_response = client._connection.api_request( method='POST', path='%s/cancel' % (self.path,)) self._set_properties(api_response['job']) + # The Future interface requires that we return True if the *attempt* + # to cancel was successful. + return True + + # The following methods implement the PollingFuture interface. Note that + # the methods above are from the pre-Future interface and are left for + # compatibility. The only "overloaded" method is :meth:`cancel`, which + # satisfies both interfaces. + + def _set_future_result(self): + """Set the result or exception from the job if it is complete.""" + # This must be done in a lock to prevent the polling thread + # and main thread from both executing the completion logic + # at the same time. + with self._completion_lock: + # If the operation isn't complete or if the result has already been + # set, do not call set_result/set_exception again. + # Note: self._result_set is set to True in set_result and + # set_exception, in case those methods are invoked directly. + if self.state != _DONE_STATE or self._result_set: + return + + if self.error_result is not None: + exception = _error_result_to_exception(self.error_result) + self.set_exception(exception) + else: + self.set_result(self) + + def done(self): + """Refresh the job and checks if it is complete. + + :rtype: bool + :returns: True if the job is complete, False otherwise. + """ + # Do not refresh is the state is already done, as the job will not + # change once complete. + if self.state != _DONE_STATE: + self.reload() + return self.state == _DONE_STATE + + def result(self, timeout=None): + """Start the job and wait for it to complete and get the result. + + :type timeout: int + :param timeout: How long to wait for job to complete before raising + a :class:`TimeoutError`. + + :rtype: _AsyncJob + :returns: This instance. 
+
+ :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the job
+ failed or :class:`TimeoutError` if the job did not complete in the
+ given timeout.
+ """
+ if self.state is None:
+ self.begin()
+ return super(_AsyncJob, self).result(timeout=timeout)
+
+ def cancelled(self):
+ """Check if the job has been cancelled.
+
+ The API does not expose a direct "was this job cancelled" flag, so as
+ a proxy this returns True only when the job's error result carries
+ the 'stopped' reason. This method is here to satisfy the interface
+ for :class:`google.cloud.future.Future`.
+
+ :rtype: bool
+ :returns: True if the job's error result indicates it was stopped.
+ """
+ return (self.error_result is not None
+ and self.error_result.get('reason') == _STOPPED_REASON)


class _LoadConfiguration(object):
@@ -1127,7 +1255,7 @@ def from_api_repr(cls, resource, client):
job._set_properties(resource)
return job
- def results(self):
+ def query_results(self):
"""Construct a QueryResults instance, bound to this job.
:rtype: :class:`~google.cloud.bigquery.query.QueryResults`
:returns: The query results.
"""
from google.cloud.bigquery.query import QueryResults
return QueryResults.from_query_job(self)
+
+ def results(self):
+ """DEPRECATED.
+
+ This method is deprecated. Use :meth:`query_results` or :meth:`result`.
+
+ Construct a QueryResults instance, bound to this job.
+
+ :rtype: :class:`~google.cloud.bigquery.query.QueryResults`
+ :returns: The query results.
+ """
+ warnings.warn(
+ 'QueryJob.results() is deprecated. Please use query_results() or '
+ 'result().', DeprecationWarning)
+ return self.query_results()
+
+ def result(self, timeout=None):
+ """Start the job and wait for it to complete and get the result.
+
+ :type timeout: int
+ :param timeout: How long to wait for job to complete before raising
+ a :class:`TimeoutError`.
+
+ :rtype: :class:`~google.cloud.bigquery.query.QueryResults`
+ :returns: The query results.
+
+ :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the job
+ failed or :class:`TimeoutError` if the job did not complete in the
+ given timeout.
+ """
+ super(QueryJob, self).result(timeout=timeout)
+ # Return a QueryResults instance instead of returning the job.
+ return self.query_results()
diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py
index 3391ec2bd2d8..1d3da3d2a83d 100644
--- a/bigquery/tests/system.py
+++ b/bigquery/tests/system.py
@@ -19,6 +19,7 @@
import os
import time
import unittest
+import uuid
from google.cloud import bigquery
from google.cloud._helpers import UTC
@@ -1013,6 +1014,15 @@ def test_large_query_w_public_data(self):
rows = list(iterator)
self.assertEqual(len(rows), LIMIT)
+ def test_async_query_future(self):
+ query_job = Config.CLIENT.run_async_query(
+ str(uuid.uuid4()), 'SELECT 1')
+ query_job.use_legacy_sql = False
+
+ iterator = query_job.result().fetch_data()
+ rows = list(iterator)
+ self.assertEqual(rows, [(1,)])
+
def test_insert_nested_nested(self):
# See #2951
SF = bigquery.SchemaField
diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py
index 57d96bf8ae15..8b9d079df148 100644
--- a/bigquery/tests/unit/test_job.py
+++ b/bigquery/tests/unit/test_job.py
@@ -12,9 +12,34 @@
# See the License for the specific language governing permissions and
# limitations under the License.
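Taken together, the changes above give BigQuery jobs a concurrent.futures-style workflow. A minimal usage sketch, mirroring the new test_async_query_future system test (illustrative only; assumes an authenticated client):

import uuid

from google.cloud import bigquery

client = bigquery.Client()
query_job = client.run_async_query(str(uuid.uuid4()), 'SELECT 1')
query_job.use_legacy_sql = False

# result() begins the job if needed, polls until the state is DONE, raises
# a GoogleCloudError mapped from the error reason on failure, and (for
# QueryJob) returns a QueryResults instance.
rows = list(query_job.result(timeout=60).fetch_data())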
+import copy +import warnings + +from six.moves import http_client import unittest +class Test__error_result_to_exception(unittest.TestCase): + def _call_fut(self, *args, **kwargs): + from google.cloud.bigquery import job + return job._error_result_to_exception(*args, **kwargs) + + def test_simple(self): + error_result = { + 'reason': 'invalid', + 'message': 'bad request' + } + exception = self._call_fut(error_result) + self.assertEqual(exception.code, http_client.BAD_REQUEST) + self.assertTrue(exception.message.startswith('bad request')) + self.assertIn("'reason': 'invalid'", exception.message) + + def test_missing_reason(self): + error_result = {} + exception = self._call_fut(error_result) + self.assertEqual(exception.code, http_client.INTERNAL_SERVER_ERROR) + + class _Base(object): PROJECT = 'project' SOURCE1 = 'http://example.com/source1.csv' @@ -1514,15 +1539,88 @@ def test_from_api_repr_w_properties(self): self.assertIs(dataset._client, client) self._verifyResourceProperties(dataset, RESOURCE) - def test_results(self): + def test_cancelled(self): + client = _Client(self.PROJECT) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + job._properties['status'] = { + 'state': 'DONE', + 'errorResult': { + 'reason': 'stopped' + } + } + + self.assertTrue(job.cancelled()) + + def test_query_results(self): from google.cloud.bigquery.query import QueryResults client = _Client(self.PROJECT) job = self._make_one(self.JOB_NAME, self.QUERY, client) - results = job.results() + results = job.query_results() self.assertIsInstance(results, QueryResults) self.assertIs(results._job, job) + def test_results_is_deprecated(self): + client = _Client(self.PROJECT) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + + with warnings.catch_warnings(record=True) as warned: + warnings.simplefilter('always') + job.results() + self.assertEqual(len(warned), 1) + self.assertIn('deprecated', str(warned[0])) + + def test_result(self): + from google.cloud.bigquery.query import QueryResults + + client = _Client(self.PROJECT) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + job._properties['status'] = {'state': 'DONE'} + + result = job.result() + + self.assertIsInstance(result, QueryResults) + self.assertIs(result._job, job) + + def test_result_invokes_begins(self): + begun_resource = self._makeResource() + done_resource = copy.deepcopy(begun_resource) + done_resource['status'] = {'state': 'DONE'} + connection = _Connection(begun_resource, done_resource) + client = _Client(self.PROJECT, connection=connection) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + + job.result() + + self.assertEqual(len(connection._requested), 2) + begin_request, reload_request = connection._requested + self.assertEqual(begin_request['method'], 'POST') + self.assertEqual(reload_request['method'], 'GET') + + def test_result_error(self): + from google.cloud import exceptions + + client = _Client(self.PROJECT) + job = self._make_one(self.JOB_NAME, self.QUERY, client) + error_result = { + 'debugInfo': 'DEBUG', + 'location': 'LOCATION', + 'message': 'MESSAGE', + 'reason': 'invalid' + } + job._properties['status'] = { + 'errorResult': error_result, + 'errors': [error_result], + 'state': 'DONE' + } + job._set_future_result() + + with self.assertRaises(exceptions.GoogleCloudError) as exc_info: + job.result() + + self.assertIsInstance(exc_info.exception, exceptions.GoogleCloudError) + self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST) + def test_begin_w_bound_client(self): PATH = '/projects/%s/jobs' % 
(self.PROJECT,) RESOURCE = self._makeResource() From 01f8689435b068ab1b53269d3ccfa8648ba0f666 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Jul 2017 16:20:38 -0700 Subject: [PATCH 43/62] Removing vendored in google.cloud.streaming. (#3654) * Removing vendored in google.cloud.streaming. * Modifying setup.cfg so pytest errors are sane. This is **not** to be merged, just to debug the b0rken build: https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python/2515 --- core/google/cloud/streaming/__init__.py | 17 - .../google/cloud/streaming/buffered_stream.py | 106 - core/google/cloud/streaming/exceptions.py | 122 - core/google/cloud/streaming/http_wrapper.py | 396 ---- core/google/cloud/streaming/stream_slice.py | 87 - core/google/cloud/streaming/transfer.py | 1223 ---------- core/google/cloud/streaming/util.py | 74 - core/tests/unit/streaming/__init__.py | 13 - .../unit/streaming/test_buffered_stream.py | 141 -- core/tests/unit/streaming/test_exceptions.py | 105 - .../tests/unit/streaming/test_http_wrapper.py | 498 ---- .../tests/unit/streaming/test_stream_slice.py | 90 - core/tests/unit/streaming/test_transfer.py | 2035 ----------------- core/tests/unit/streaming/test_util.py | 66 - setup.cfg | 3 + 15 files changed, 3 insertions(+), 4973 deletions(-) delete mode 100644 core/google/cloud/streaming/__init__.py delete mode 100644 core/google/cloud/streaming/buffered_stream.py delete mode 100644 core/google/cloud/streaming/exceptions.py delete mode 100644 core/google/cloud/streaming/http_wrapper.py delete mode 100644 core/google/cloud/streaming/stream_slice.py delete mode 100644 core/google/cloud/streaming/transfer.py delete mode 100644 core/google/cloud/streaming/util.py delete mode 100644 core/tests/unit/streaming/__init__.py delete mode 100644 core/tests/unit/streaming/test_buffered_stream.py delete mode 100644 core/tests/unit/streaming/test_exceptions.py delete mode 100644 core/tests/unit/streaming/test_http_wrapper.py delete mode 100644 core/tests/unit/streaming/test_stream_slice.py delete mode 100644 core/tests/unit/streaming/test_transfer.py delete mode 100644 core/tests/unit/streaming/test_util.py diff --git a/core/google/cloud/streaming/__init__.py b/core/google/cloud/streaming/__init__.py deleted file mode 100644 index 44e00907cb66..000000000000 --- a/core/google/cloud/streaming/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Vendored-in from google-apitools 0.4.11 - -"""Base ``google.cloud.streaming`` package.""" diff --git a/core/google/cloud/streaming/buffered_stream.py b/core/google/cloud/streaming/buffered_stream.py deleted file mode 100644 index 24a52176cb66..000000000000 --- a/core/google/cloud/streaming/buffered_stream.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Small helper class to provide a small slice of a stream. - -This class reads ahead to detect if we are at the end of the stream. -""" - - -class BufferedStream(object): - """Buffers a stream, reading ahead to determine if we're at the end. - - :type stream: readable file-like object - :param stream: the stream to be buffered - - :type start: int - :param start: the starting point in the stream - - :type size: int - :param size: the size of the buffer - """ - def __init__(self, stream, start, size): - self._stream = stream - self._start_pos = start - self._buffer_pos = 0 - - if not hasattr(self._stream, 'closed') or not self._stream.closed: - self._buffered_data = self._stream.read(size) - else: - self._buffered_data = b'' - - self._stream_at_end = len(self._buffered_data) < size - self._end_pos = self._start_pos + len(self._buffered_data) - - def __repr__(self): - return ('Buffered stream %s from position %s-%s with %s ' - 'bytes remaining' % (self._stream, self._start_pos, - self._end_pos, self._bytes_remaining)) - - def __len__(self): - return len(self._buffered_data) - - @property - def stream_exhausted(self): - """Does the stream have bytes remaining beyond the buffer - - :rtype: bool - :returns: Boolean indicating if the stream is exhausted. - """ - return self._stream_at_end - - @property - def stream_end_position(self): - """Point to which stream was read into the buffer - - :rtype: int - :returns: The end-position of the stream. - """ - return self._end_pos - - @property - def _bytes_remaining(self): - """Bytes remaining to be read from the buffer - - :rtype: int - :returns: The number of bytes remaining. - """ - return len(self._buffered_data) - self._buffer_pos - - def read(self, size=None): - """Read bytes from the buffer. - - :type size: int - :param size: - (Optional) How many bytes to read (defaults to all remaining - bytes). - - :rtype: str - :returns: The data read from the stream. - """ - if size is None or size < 0: - raise ValueError( - 'Illegal read of size %s requested on BufferedStream. ' - 'Wrapped stream %s is at position %s-%s, ' - '%s bytes remaining.' % - (size, self._stream, self._start_pos, self._end_pos, - self._bytes_remaining)) - - if not self._bytes_remaining: - return b'' - - size = min(size, self._bytes_remaining) - data = self._buffered_data[self._buffer_pos:self._buffer_pos + size] - self._buffer_pos += size - return data diff --git a/core/google/cloud/streaming/exceptions.py b/core/google/cloud/streaming/exceptions.py deleted file mode 100644 index cfeb8f8fa41f..000000000000 --- a/core/google/cloud/streaming/exceptions.py +++ /dev/null @@ -1,122 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Exceptions for generated client libraries.""" - - -class Error(Exception): - """Base class for all exceptions.""" - - -class CommunicationError(Error): - """Any communication error talking to an API server.""" - - -class HttpError(CommunicationError): - """Error making a request. Soon to be HttpError. - - :type response: dict - :param response: headers from the response which returned the error - - :type content: bytes - :param content: payload of the response which returned the error - - :type url: str - :param url: URL of the response which returned the error - """ - def __init__(self, response, content, url): - super(HttpError, self).__init__() - self.response = response - self.content = content - self.url = url - - def __str__(self): - content = self.content.decode('ascii', 'replace') - return 'HttpError accessing <%s>: response: <%s>, content <%s>' % ( - self.url, self.response, content) - - @property - def status_code(self): - """Status code for the response. - - :rtype: int - :returns: the code - """ - return int(self.response['status']) - - @classmethod - def from_response(cls, http_response): - """Factory: construct an exception from a response. - - :type http_response: :class:`~.http_wrapper.Response` - :param http_response: the response which returned the error - - :rtype: :class:`HttpError` - :returns: The error created from the response. - """ - return cls(http_response.info, http_response.content, - http_response.request_url) - - -class TransferError(CommunicationError): - """Errors related to transfers.""" - - -class TransferRetryError(TransferError): - """Retryable errors related to transfers.""" - - -class TransferInvalidError(TransferError): - """The given transfer is invalid.""" - - -class RequestError(CommunicationError): - """The request was not successful.""" - - -class RetryAfterError(HttpError): - """The response contained a retry-after header. - - :type response: dict - :param response: headers from the response which returned the error. - - :type content: bytes - :param content: payload of the response which returned the error. - - :type url: str - :param url: URL of the response which returned the error. - - :type retry_after: int - :param retry_after: seconds to wait before retrying. - """ - def __init__(self, response, content, url, retry_after): - super(RetryAfterError, self).__init__(response, content, url) - self.retry_after = int(retry_after) - - @classmethod - def from_response(cls, http_response): - """Factory: construct an exception from a response. - - :type http_response: :class:`~.http_wrapper.Response` - :param http_response: the response which returned the error. - - :rtype: :class:`RetryAfterError` - :returns: The error created from the response. - """ - return cls(http_response.info, http_response.content, - http_response.request_url, http_response.retry_after) - - -class BadStatusCodeError(HttpError): - """The request completed but returned a bad status code.""" diff --git a/core/google/cloud/streaming/http_wrapper.py b/core/google/cloud/streaming/http_wrapper.py deleted file mode 100644 index e80e105175e7..000000000000 --- a/core/google/cloud/streaming/http_wrapper.py +++ /dev/null @@ -1,396 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""HTTP wrapper for apitools. - -This library wraps the underlying http library we use, which is -currently :mod:`httplib2`. -""" - -import collections -import contextlib -import logging -import socket -import time - -import httplib2 -import six -from six.moves import http_client -from six.moves.urllib import parse - -from google.cloud.streaming.exceptions import BadStatusCodeError -from google.cloud.streaming.exceptions import RequestError -from google.cloud.streaming.exceptions import RetryAfterError -from google.cloud.streaming.util import calculate_wait_for_retry - - -_REDIRECTIONS = 5 -# 308 and 429 don't have names in httplib. -RESUME_INCOMPLETE = 308 -TOO_MANY_REQUESTS = 429 - - -_REDIRECT_STATUS_CODES = ( - http_client.MOVED_PERMANENTLY, - http_client.FOUND, - http_client.SEE_OTHER, - http_client.TEMPORARY_REDIRECT, - RESUME_INCOMPLETE, -) - - -_RETRYABLE_EXCEPTIONS = ( - http_client.BadStatusLine, - http_client.IncompleteRead, - http_client.ResponseNotReady, - socket.error, - httplib2.ServerNotFoundError, - ValueError, - RequestError, - BadStatusCodeError, - RetryAfterError, -) - - -@contextlib.contextmanager -def _httplib2_debug_level(http_request, level, http=None): - """Temporarily change the value of httplib2.debuglevel, if necessary. - - If http_request has a `loggable_body` distinct from `body`, then we - need to prevent httplib2 from logging the full body. This sets - httplib2.debuglevel for the duration of the `with` block; however, - that alone won't change the value of existing HTTP connections. If - an httplib2.Http object is provided, we'll also change the level on - any cached connections attached to it. - - :type http_request: :class:`Request` - :param http_request: the request to be logged. - - :type level: int - :param level: the debuglevel for logging. - - :type http: :class:`httplib2.Http` - :param http: - (Optional) the instance on whose connections to set the debuglevel. - """ - if http_request.loggable_body is None: - yield - return - old_level = httplib2.debuglevel - http_levels = {} - httplib2.debuglevel = level - if http is not None and getattr(http, 'connections', None) is not None: - for connection_key, connection in http.connections.items(): - # httplib2 stores two kinds of values in this dict, connection - # classes and instances. Since the connection types are all - # old-style classes, we can't easily distinguish by connection - # type -- so instead we use the key pattern. - if ':' not in connection_key: - continue - http_levels[connection_key] = connection.debuglevel - connection.set_debuglevel(level) - yield - httplib2.debuglevel = old_level - if http is not None: - for connection_key, old_level in http_levels.items(): - http.connections[connection_key].set_debuglevel(old_level) - - -class Request(object): - """Encapsulates the data for an HTTP request. 
- - :type url: str - :param url: the URL for the request - - :type http_method: str - :param http_method: the HTTP method to use for the request - - :type headers: mapping - :param headers: (Optional) headers to be sent with the request - - :type body: str - :param body: body to be sent with the request - """ - def __init__(self, url='', http_method='GET', headers=None, body=''): - self.url = url - self.http_method = http_method - self.headers = headers or {} - self._body = None - self._loggable_body = None - self.body = body - - @property - def loggable_body(self): - """Request body for logging purposes - - :rtype: str - :returns: The body to be logged. - """ - return self._loggable_body - - @loggable_body.setter - def loggable_body(self, value): - """Update request body for logging purposes - - :type value: str - :param value: updated body - - :raises: :exc:`RequestError` if the request does not have a body. - """ - if self.body is None: - raise RequestError( - 'Cannot set loggable body on request with no body') - self._loggable_body = value - - @property - def body(self): - """Request body - - :rtype: str - :returns: The body of the request. - """ - return self._body - - @body.setter - def body(self, value): - """Update the request body - - Handles logging and length measurement. - - :type value: str - :param value: updated body - """ - self._body = value - if value is not None: - # Avoid calling len() which cannot exceed 4GiB in 32-bit python. - body_length = getattr( - self._body, 'length', None) or len(self._body) - self.headers['content-length'] = str(body_length) - else: - self.headers.pop('content-length', None) - # This line ensures we don't try to print large requests. - if not isinstance(value, (type(None), six.string_types)): - self.loggable_body = '' - - -def _process_content_range(content_range): - """Convert a 'Content-Range' header into a length for the response. - - Helper for :meth:`Response.length`. - - :type content_range: str - :param content_range: the header value being parsed. - - :rtype: int - :returns: the length of the response chunk. - """ - _, _, range_spec = content_range.partition(' ') - byte_range, _, _ = range_spec.partition('/') - start, _, end = byte_range.partition('-') - return int(end) - int(start) + 1 - - -# Note: currently the order of fields here is important, since we want -# to be able to pass in the result from httplib2.request. -_ResponseTuple = collections.namedtuple( - 'HttpResponse', ['info', 'content', 'request_url']) - - -class Response(_ResponseTuple): - """Encapsulates data for an HTTP response. - """ - __slots__ = () - - def __len__(self): - return self.length - - @property - def length(self): - """Length of this response. - - Exposed as an attribute since using ``len()`` directly can fail - for responses larger than ``sys.maxint``. - - :rtype: int or long - :returns: The length of the response. - """ - if 'content-encoding' in self.info and 'content-range' in self.info: - # httplib2 rewrites content-length in the case of a compressed - # transfer; we can't trust the content-length header in that - # case, but we *can* trust content-range, if it's present. - return _process_content_range(self.info['content-range']) - elif 'content-length' in self.info: - return int(self.info.get('content-length')) - elif 'content-range' in self.info: - return _process_content_range(self.info['content-range']) - return len(self.content) - - @property - def status_code(self): - """HTTP status code - - :rtype: int - :returns: The response status code. 
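The inclusive-range arithmetic in `_process_content_range` above is easy to get wrong by one; here is a self-contained restatement with worked values (it mirrors the private helper rather than importing it):

def process_content_range(content_range):
    # Restates _process_content_range:
    # 'bytes 0-99/1234' -> range spec '0-99/1234' -> byte range '0-99'
    _, _, range_spec = content_range.partition(' ')
    byte_range, _, _ = range_spec.partition('/')
    start, _, end = byte_range.partition('-')
    # Inclusive range: bytes 0-99 is 100 bytes, hence the +1.
    return int(end) - int(start) + 1

assert process_content_range('bytes 0-99/1234') == 100
assert process_content_range('bytes 500-999/*') == 500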
- """ - return int(self.info['status']) - - @property - def retry_after(self): - """Retry interval (if set). - - :rtype: int - :returns: interval in seconds - """ - if 'retry-after' in self.info: - return int(self.info['retry-after']) - - @property - def is_redirect(self): - """Does this response contain a redirect - - :rtype: bool - :returns: True if the status code indicates a redirect and the - 'location' header is present. - """ - return (self.status_code in _REDIRECT_STATUS_CODES and - 'location' in self.info) - - -def _check_response(response): - """Validate a response - - :type response: :class:`Response` - :param response: the response to validate - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if response - is None, :exc:`~.exceptions.BadStatusCodeError` if response status - code indicates an error, or :exc:`~.exceptions.RetryAfterError` - if response indicates a retry interval. - """ - if response is None: - # Caller shouldn't call us if the response is None, but handle anyway. - raise RequestError( - 'Request did not return a response.') - elif (response.status_code >= 500 or - response.status_code == TOO_MANY_REQUESTS): - raise BadStatusCodeError.from_response(response) - elif response.retry_after: - raise RetryAfterError.from_response(response) - - -def _reset_http_connections(http): - """Rebuild all http connections in the httplib2.Http instance. - - httplib2 overloads the map in http.connections to contain two different - types of values: - { scheme string: connection class } and - { scheme + authority string : actual http connection } - Here we remove all of the entries for actual connections so that on the - next request httplib2 will rebuild them from the connection types. - - :type http: :class:`httplib2.Http` - :param http: the instance whose connections are to be rebuilt - """ - if getattr(http, 'connections', None): - for conn_key in list(http.connections.keys()): - if ':' in conn_key: - del http.connections[conn_key] - - -def _make_api_request_no_retry(http, http_request, redirections=_REDIRECTIONS): - """Send an HTTP request via the given http instance. - - This wrapper exists to handle translation between the plain httplib2 - request/response types and the Request and Response types above. - - :type http: :class:`httplib2.Http` - :param http: an instance which impelements the `Http` API. - - :type http_request: :class:`Request` - :param http_request: the request to send. - - :type redirections: int - :param redirections: Number of redirects to follow. - - :rtype: :class:`Response` - :returns: an object representing the server's response - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if no - response could be parsed. - """ - connection_type = None - # Handle overrides for connection types. This is used if the caller - # wants control over the underlying connection for managing callbacks - # or hash digestion. 
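The validation in `_check_response` above boils down to a three-way decision on the status code and the retry hint. A standalone restatement of that decision table (plain values stand in for `Response` objects):

def classify(status_code, retry_after=None):
    # Restates _check_response; 429 is TOO_MANY_REQUESTS above.
    if status_code >= 500 or status_code == 429:
        return 'BadStatusCodeError'   # retryable server-side failure
    if retry_after is not None:
        return 'RetryAfterError'      # server asked us to back off
    return 'ok'

assert classify(200) == 'ok'
assert classify(503) == 'BadStatusCodeError'
assert classify(429) == 'BadStatusCodeError'
assert classify(200, retry_after=30) == 'RetryAfterError'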
- if getattr(http, 'connections', None): - url_scheme = parse.urlsplit(http_request.url).scheme - if url_scheme and url_scheme in http.connections: - connection_type = http.connections[url_scheme] - - # Custom printing only at debuglevel 4 - new_debuglevel = 4 if httplib2.debuglevel == 4 else 0 - with _httplib2_debug_level(http_request, new_debuglevel, http=http): - info, content = http.request( - str(http_request.url), method=str(http_request.http_method), - body=http_request.body, headers=http_request.headers, - redirections=redirections, connection_type=connection_type) - - if info is None: - raise RequestError() - - response = Response(info, content, http_request.url) - _check_response(response) - return response - - -def make_api_request(http, http_request, retries=7, - redirections=_REDIRECTIONS): - """Send an HTTP request via the given http, performing error/retry handling. - - :type http: :class:`httplib2.Http` - :param http: an instance which implements the `Http` API. - - :type http_request: :class:`Request` - :param http_request: the request to send. - - :type retries: int - :param retries: Number of retries to attempt on retryable - responses (such as 429 or 5XX). - - :type redirections: int - :param redirections: Number of redirects to follow. - - :rtype: :class:`Response` - :returns: an object representing the server's response. - - :raises: :exc:`google.cloud.streaming.exceptions.RequestError` if no - response could be parsed. - """ - retry = 0 - while True: - try: - return _make_api_request_no_retry(http, http_request, - redirections=redirections) - except _RETRYABLE_EXCEPTIONS as exc: - retry += 1 - if retry >= retries: - raise - retry_after = getattr(exc, 'retry_after', None) - if retry_after is None: - retry_after = calculate_wait_for_retry(retry) - - _reset_http_connections(http) - logging.debug('Retrying request to url %s after exception %s', - http_request.url, type(exc).__name__) - time.sleep(retry_after) diff --git a/core/google/cloud/streaming/stream_slice.py b/core/google/cloud/streaming/stream_slice.py deleted file mode 100644 index 3a13337bb993..000000000000 --- a/core/google/cloud/streaming/stream_slice.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Small helper class to provide a small slice of a stream.""" - -from six.moves import http_client - - -class StreamSlice(object): - """Provides a slice-like object for streams. - - :type stream: readable file-like object - :param stream: the stream to be buffered. - - :type max_bytes: int - :param max_bytes: maximum number of bytes to return in the slice. 
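The retry loop in `make_api_request` above generalizes to any callable. A minimal sketch of the same shape, with hypothetical simplifications: a generic exception tuple, exponential backoff in place of `calculate_wait_for_retry`, and no connection reset.

import time

def with_retries(func, retries=7, retryable=(OSError, ValueError)):
    """Call func(), retrying with backoff; re-raise the final failure."""
    retry = 0
    while True:
        try:
            return func()
        except retryable as exc:
            retry += 1
            if retry >= retries:
                raise                # budget exhausted: propagate
            # Prefer a server-supplied hint when present, else back off
            # exponentially (simplified stand-in for the helper above).
            wait = getattr(exc, 'retry_after', None) or min(2 ** retry, 60)
            time.sleep(wait)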
- """ - def __init__(self, stream, max_bytes): - self._stream = stream - self._remaining_bytes = max_bytes - self._max_bytes = max_bytes - - def __repr__(self): - return 'Slice of stream %s with %s/%s bytes not yet read' % ( - self._stream, self._remaining_bytes, self._max_bytes) - - def __len__(self): - return self._max_bytes - - def __nonzero__(self): - # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid - # accidental len() calls from httplib in the form of "if this_object:". - return bool(self._max_bytes) - - @property - def length(self): - """Maximum number of bytes to return in the slice. - - .. note:: - - For 32-bit python2.x, len() cannot exceed a 32-bit number. - - :rtype: int - :returns: The max "length" of the stream. - """ - return self._max_bytes - - def read(self, size=None): - """Read bytes from the slice. - - Compared to other streams, there is one case where we may - unexpectedly raise an exception on read: if the underlying stream - is exhausted (i.e. returns no bytes on read), and the size of this - slice indicates we should still be able to read more bytes, we - raise :exc:`IncompleteRead`. - - :type size: int - :param size: - (Optional) If provided, read no more than size bytes from the - stream. - - :rtype: bytes - :returns: bytes read from this slice. - - :raises: :exc:`IncompleteRead` - """ - if size is not None: - read_size = min(size, self._remaining_bytes) - else: - read_size = self._remaining_bytes - data = self._stream.read(read_size) - if read_size > 0 and not data: - raise http_client.IncompleteRead( - self._max_bytes - self._remaining_bytes, self._max_bytes) - self._remaining_bytes -= len(data) - return data diff --git a/core/google/cloud/streaming/transfer.py b/core/google/cloud/streaming/transfer.py deleted file mode 100644 index 3d6d5b8e6016..000000000000 --- a/core/google/cloud/streaming/transfer.py +++ /dev/null @@ -1,1223 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
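Usage of `StreamSlice` is easiest to see against an in-memory stream. This sketch assumes the class is importable from `google.cloud.streaming.stream_slice`, as it was before this deletion:

from io import BytesIO

# Pre-deletion import path (assumption).
from google.cloud.streaming.stream_slice import StreamSlice

stream = BytesIO(b'abcdefghij')
chunk = StreamSlice(stream, 4)      # expose only the next 4 bytes

assert len(chunk) == 4
assert chunk.read() == b'abcd'      # reads stop at the slice boundary
assert stream.read() == b'efghij'   # the rest is still in the stream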
-# pylint: disable=too-many-lines - -"""Upload and download support for apitools.""" - -import email.generator as email_generator -import email.mime.multipart as mime_multipart -import email.mime.nonmultipart as mime_nonmultipart -import mimetypes -import os - -import httplib2 -import six -from six.moves import http_client - -from google.cloud._helpers import _to_bytes -from google.cloud.streaming.buffered_stream import BufferedStream -from google.cloud.streaming.exceptions import CommunicationError -from google.cloud.streaming.exceptions import HttpError -from google.cloud.streaming.exceptions import TransferInvalidError -from google.cloud.streaming.exceptions import TransferRetryError -from google.cloud.streaming.http_wrapper import make_api_request -from google.cloud.streaming.http_wrapper import Request -from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE -from google.cloud.streaming.stream_slice import StreamSlice -from google.cloud.streaming.util import acceptable_mime_type - - -RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 -SIMPLE_UPLOAD = 'simple' -RESUMABLE_UPLOAD = 'resumable' - - -_DEFAULT_CHUNKSIZE = 1 << 20 - - -class _Transfer(object): - """Generic bits common to Uploads and Downloads. - - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type close_stream: bool - :param close_stream: should this instance close the stream when deleted - - :type chunksize: int - :param chunksize: the size of chunks used to download/upload a file. - - :type auto_transfer: bool - :param auto_transfer: should this instance automatically begin transfering - data when initialized - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance used to perform requests. - - :type num_retries: int - :param num_retries: how many retries should the transfer attempt - """ - - _num_retries = None - - def __init__(self, stream, close_stream=False, - chunksize=_DEFAULT_CHUNKSIZE, auto_transfer=True, - http=None, num_retries=5): - self._bytes_http = None - self._close_stream = close_stream - self._http = http - self._stream = stream - self._url = None - - # Let the @property do validation. - self.num_retries = num_retries - - self.auto_transfer = auto_transfer - self.chunksize = chunksize - - def __repr__(self): - return str(self) - - @property - def close_stream(self): - """Should this instance close the stream when deleted. - - :rtype: bool - :returns: Boolean indicated if the stream should be closed. - """ - return self._close_stream - - @property - def http(self): - """Http instance used to perform requests. - - :rtype: :class:`httplib2.Http` (or workalike) - :returns: The HTTP object used for requests. - """ - return self._http - - @property - def bytes_http(self): - """Http instance used to perform binary requests. - - Defaults to :attr:`http`. - - :rtype: :class:`httplib2.Http` (or workalike) - :returns: The HTTP object used for binary requests. - """ - return self._bytes_http or self.http - - @bytes_http.setter - def bytes_http(self, value): - """Update Http instance used to perform binary requests. - - :type value: :class:`httplib2.Http` (or workalike) - :param value: new instance - """ - self._bytes_http = value - - @property - def num_retries(self): - """How many retries should the transfer attempt - - :rtype: int - :returns: The number of retries allowed. 
- """ - return self._num_retries - - @num_retries.setter - def num_retries(self, value): - """Update how many retries should the transfer attempt - - :type value: int - """ - if not isinstance(value, six.integer_types): - raise ValueError("num_retries: pass an integer") - - if value < 0: - raise ValueError( - 'Cannot have negative value for num_retries') - self._num_retries = value - - @property - def stream(self): - """Stream to/from which data is downloaded/uploaded. - - :rtype: file-like object - :returns: The stream that sends/receives data. - """ - return self._stream - - @property - def url(self): - """URL to / from which data is downloaded/uploaded. - - :rtype: str - :returns: The URL where data is sent/received. - """ - return self._url - - def _initialize(self, http, url): - """Initialize this download by setting :attr:`http` and :attr`url`. - - Allow the user to be able to pre-initialize :attr:`http` by setting - the value in the constructor; in that case, we ignore the provided - http. - - :type http: :class:`httplib2.Http` (or a worklike) or None. - :param http: the Http instance to use to make requests. - - :type url: str - :param url: The url for this transfer. - """ - self._ensure_uninitialized() - if self.http is None: - self._http = http or httplib2.Http() - self._url = url - - @property - def initialized(self): - """Has the instance been initialized - - :rtype: bool - :returns: Boolean indicating if the current transfer - has been initialized. - """ - return self.url is not None and self.http is not None - - def _ensure_initialized(self): - """Helper: assert that the instance is initialized. - - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - if the instance is not initialized. - """ - if not self.initialized: - raise TransferInvalidError( - 'Cannot use uninitialized %s', type(self).__name__) - - def _ensure_uninitialized(self): - """Helper: assert that the instance is not initialized. - - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - if the instance is already initialized. - """ - if self.initialized: - raise TransferInvalidError( - 'Cannot re-initialize %s', type(self).__name__) - - def __del__(self): - if self._close_stream: - self._stream.close() - - -class Download(_Transfer): - """Represent a single download. - - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type kwds: dict - :param kwds: keyword arguments: all except ``total_size`` are passed - through to :meth:`_Transfer.__init__()`. - """ - _ACCEPTABLE_STATUSES = set(( - http_client.OK, - http_client.NO_CONTENT, - http_client.PARTIAL_CONTENT, - http_client.REQUESTED_RANGE_NOT_SATISFIABLE, - )) - - def __init__(self, stream, **kwds): - total_size = kwds.pop('total_size', None) - super(Download, self).__init__(stream, **kwds) - self._initial_response = None - self._progress = 0 - self._total_size = total_size - self._encoding = None - - @classmethod - def from_file(cls, filename, overwrite=False, auto_transfer=True, **kwds): - """Create a new download object from a filename. - - :type filename: str - :param filename: path/filename for the target file - - :type overwrite: bool - :param overwrite: should an existing file be overwritten - - :type auto_transfer: bool - :param auto_transfer: should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. 
- - :rtype: :class:`Download` - :returns: The download initiated from the file passed. - """ - path = os.path.expanduser(filename) - if os.path.exists(path) and not overwrite: - raise ValueError( - 'File %s exists and overwrite not specified' % path) - return cls(open(path, 'wb'), close_stream=True, - auto_transfer=auto_transfer, **kwds) - - @classmethod - def from_stream(cls, stream, auto_transfer=True, total_size=None, **kwds): - """Create a new Download object from a stream. - - :type stream: writable file-like object - :param stream: the target file - - :type total_size: int - :param total_size: (Optional) total size of the file to be downloaded - - :type auto_transfer: bool - :param auto_transfer: should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Download` - :returns: The download initiated from the stream passed. - """ - return cls(stream, auto_transfer=auto_transfer, total_size=total_size, - **kwds) - - @property - def progress(self): - """Number of bytes have been downloaded. - - :rtype: int >= 0 - :returns: The number of downloaded bytes. - """ - return self._progress - - @property - def total_size(self): - """Total number of bytes to be downloaded. - - :rtype: int or None - :returns: The total number of bytes to download. - """ - return self._total_size - - @property - def encoding(self): - """'Content-Encoding' used to transfer the file - - :rtype: str or None - :returns: The encoding of the downloaded content. - """ - return self._encoding - - def __repr__(self): - if not self.initialized: - return 'Download (uninitialized)' - else: - return 'Download with %d/%s bytes transferred from url %s' % ( - self.progress, self.total_size, self.url) - - def configure_request(self, http_request, url_builder): - """Update http_request/url_builder with download-appropriate values. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to be updated - - :type url_builder: instance with settable 'query_params' attribute. - :param url_builder: transfer policy object to be updated - """ - url_builder.query_params['alt'] = 'media' - http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,) - - def _set_total(self, info): - """Update 'total_size' based on data from a response. - - :type info: mapping - :param info: response headers - """ - if 'content-range' in info: - _, _, total = info['content-range'].rpartition('/') - if total != '*': - self._total_size = int(total) - # Note "total_size is None" means we don't know it; if no size - # info was returned on our initial range request, that means we - # have a 0-byte file. (That last statement has been verified - # empirically, but is not clearly documented anywhere.) - if self.total_size is None: - self._total_size = 0 - - def initialize_download(self, http_request, http): - """Initialize this download. - - If the instance has :attr:`auto_transfer` enabled, begins the - download immediately. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to use to initialize this download. - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance for this request. 
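How `_set_total` above recovers the file size from `Content-Range` is worth pinning down with numbers; a restatement of just the parse (the method then coerces a still-unknown size to 0 for the empty-file case):

def total_from_content_range(content_range):
    # Restates _set_total's parse: the size is everything after the
    # final '/'; a literal '*' means the server did not report a size.
    _, _, total = content_range.rpartition('/')
    return None if total == '*' else int(total)

assert total_from_content_range('bytes 0-1048575/4194304') == 4194304
assert total_from_content_range('bytes 0-99/*') is None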
- """ - self._ensure_uninitialized() - url = http_request.url - if self.auto_transfer: - end_byte = self._compute_end_byte(0) - self._set_range_header(http_request, 0, end_byte) - response = make_api_request( - self.bytes_http or http, http_request) - if response.status_code not in self._ACCEPTABLE_STATUSES: - raise HttpError.from_response(response) - self._initial_response = response - self._set_total(response.info) - url = response.info.get('content-location', response.request_url) - self._initialize(http, url) - # Unless the user has requested otherwise, we want to just - # go ahead and pump the bytes now. - if self.auto_transfer: - self.stream_file(use_chunks=True, headers=http_request.headers) - - def _normalize_start_end(self, start, end=None): - """Validate / fix up byte range. - - :type start: int - :param start: start byte of the range: if negative, used as an - offset from the end. - - :type end: int - :param end: end byte of the range. - - :rtype: tuple, (start, end) - :returns: the normalized start, end pair. - :raises: :exc:`google.cloud.streaming.exceptions.TransferInvalidError` - for invalid combinations of start, end. - """ - if end is not None: - if start < 0: - raise TransferInvalidError( - 'Cannot have end index with negative start index') - elif start >= self.total_size: - raise TransferInvalidError( - 'Cannot have start index greater than total size') - end = min(end, self.total_size - 1) - if end < start: - raise TransferInvalidError( - 'Range requested with end[%s] < start[%s]' % (end, start)) - return start, end - else: - if start < 0: - start = max(0, start + self.total_size) - return start, self.total_size - 1 - - @staticmethod - def _set_range_header(request, start, end=None): - """Update the 'Range' header in a request to match a byte range. - - :type request: :class:`google.cloud.streaming.http_wrapper.Request` - :param request: the request to update - - :type start: int - :param start: start byte of the range: if negative, used as an - offset from the end. - - :type end: int - :param end: end byte of the range. - """ - if start < 0: - request.headers['range'] = 'bytes=%d' % start - elif end is None: - request.headers['range'] = 'bytes=%d-' % start - else: - request.headers['range'] = 'bytes=%d-%d' % (start, end) - - def _compute_end_byte(self, start, end=None, use_chunks=True): - """Compute the last byte to fetch for this request. - - Based on the HTTP spec for Range and Content-Range. - - .. note:: - This is potentially confusing in several ways: - - the value for the last byte is 0-based, eg "fetch 10 bytes - from the beginning" would return 9 here. - - if we have no information about size, and don't want to - use the chunksize, we'll return None. - - :type start: int - :param start: start byte of the range. - - :type end: int - :param end: (Optional) suggested last byte of the range. - - :type use_chunks: bool - :param use_chunks: If False, ignore :attr:`chunksize`. - - :rtype: str - :returns: Last byte to use in a 'Range' header, or None. - """ - end_byte = end - - if start < 0 and not self.total_size: - return end_byte - - if use_chunks: - alternate = start + self.chunksize - 1 - if end_byte is not None: - end_byte = min(end_byte, alternate) - else: - end_byte = alternate - - if self.total_size: - alternate = self.total_size - 1 - if end_byte is not None: - end_byte = min(end_byte, alternate) - else: - end_byte = alternate - - return end_byte - - def _get_chunk(self, start, end, headers=None): - """Retrieve a chunk of the file. 
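`_compute_end_byte` above is essentially a pair of `min` clamps. A worked example with concrete sizes, mirroring the method for the common non-negative-start case (the negative-start early return is omitted):

def compute_end_byte(start, chunksize, total_size, end=None):
    """Clamp the requested end against chunk size and file size."""
    end_byte = end
    if chunksize:                    # chunked mode: at most one chunk
        alternate = start + chunksize - 1
        end_byte = alternate if end_byte is None else min(end_byte, alternate)
    if total_size:                   # never run past the last byte
        alternate = total_size - 1
        end_byte = alternate if end_byte is None else min(end_byte, alternate)
    return end_byte

# 1 MiB chunks over a 2.5 MiB file: last byte of the first chunk ...
assert compute_end_byte(0, 1 << 20, (5 << 20) // 2) == (1 << 20) - 1
# ... and of the final, short chunk.
assert compute_end_byte(2 << 20, 1 << 20, (5 << 20) // 2) == (5 << 20) // 2 - 1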
-
-        :type start: int
-        :param start: start byte of the range.
-
-        :type end: int
-        :param end: (Optional) end byte of the range.
-
-        :type headers: dict
-        :param headers: (Optional) Headers to be used for the ``Request``.
-
-        :rtype: :class:`google.cloud.streaming.http_wrapper.Response`
-        :returns: response from the chunk request.
-        """
-        self._ensure_initialized()
-        request = Request(url=self.url, headers=headers)
-        self._set_range_header(request, start, end=end)
-        return make_api_request(
-            self.bytes_http, request, retries=self.num_retries)
-
-    def _process_response(self, response):
-        """Update attributes and write to the stream, based on the response.
-
-        :type response: :class:`google.cloud.streaming.http_wrapper.Response`
-        :param response: response from a download request.
-
-        :rtype: :class:`google.cloud.streaming.http_wrapper.Response`
-        :returns: the response
-        :raises: :exc:`google.cloud.streaming.exceptions.HttpError` for
-                 missing / unauthorized responses;
-                 :exc:`google.cloud.streaming.exceptions.TransferRetryError`
-                 for other error responses.
-        """
-        if response.status_code not in self._ACCEPTABLE_STATUSES:
-            # We distinguish errors that mean we made a mistake in setting
-            # up the transfer versus something we should attempt again.
-            if response.status_code in (http_client.FORBIDDEN,
-                                        http_client.NOT_FOUND):
-                raise HttpError.from_response(response)
-            else:
-                raise TransferRetryError(response.content)
-        if response.status_code in (http_client.OK,
-                                    http_client.PARTIAL_CONTENT):
-            self.stream.write(response.content)
-            self._progress += response.length
-            if response.info and 'content-encoding' in response.info:
-                self._encoding = response.info['content-encoding']
-        elif response.status_code == http_client.NO_CONTENT:
-            # It's important to write something to the stream for the case
-            # of a 0-byte download to a file, as otherwise python won't
-            # create the file.
-            self.stream.write('')
-        return response
-
-    def get_range(self, start, end=None, use_chunks=True):
-        """Retrieve a given byte range from this download, inclusive.
-
-        Writes retrieved bytes into :attr:`stream`.
-
-        Range must take one of these three forms:
-        * 0 <= start, end = None: Fetch from start to the end of the file.
-        * 0 <= start <= end: Fetch the bytes from start to end.
-        * start < 0, end = None: Fetch the last -start bytes of the file.
-
-        (These variations correspond to those described in the HTTP 1.1
-        protocol for range headers in RFC 2616, sec. 14.35.1.)
-
-        :type start: int
-        :param start: Where to start fetching bytes. (See above.)
-
-        :type end: int
-        :param end: (Optional) Where to stop fetching bytes. (See above.)
-
-        :type use_chunks: bool
-        :param use_chunks: If False, ignore :attr:`chunksize`
-                           and fetch this range in a single request.
-                           If True, streams via chunks.
-
-        :raises: :exc:`google.cloud.streaming.exceptions.TransferRetryError`
-            if a request returns an empty response.
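The three range forms documented just above map onto `Range` headers exactly as `_set_range_header` builds them; restated with checks:

def range_header(start, end=None):
    # Restates _set_range_header's three branches.
    if start < 0:
        return 'bytes=%d' % start     # suffix form: the last -start bytes
    if end is None:
        return 'bytes=%d-' % start    # open-ended: start through EOF
    return 'bytes=%d-%d' % (start, end)

assert range_header(0) == 'bytes=0-'          # whole file
assert range_header(100, 199) == 'bytes=100-199'
assert range_header(-500) == 'bytes=-500'     # final 500 bytes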
- """ - self._ensure_initialized() - progress_end_normalized = False - if self.total_size is not None: - progress, end_byte = self._normalize_start_end(start, end) - progress_end_normalized = True - else: - progress = start - end_byte = end - while (not progress_end_normalized or end_byte is None or - progress <= end_byte): - end_byte = self._compute_end_byte(progress, end=end_byte, - use_chunks=use_chunks) - response = self._get_chunk(progress, end_byte) - if not progress_end_normalized: - self._set_total(response.info) - progress, end_byte = self._normalize_start_end(start, end) - progress_end_normalized = True - response = self._process_response(response) - progress += response.length - if response.length == 0: - raise TransferRetryError( - 'Zero bytes unexpectedly returned in download response') - - def stream_file(self, use_chunks=True, headers=None): - """Stream the entire download. - - Writes retrieved bytes into :attr:`stream`. - - :type use_chunks: bool - :param use_chunks: If False, ignore :attr:`chunksize` - and stream this download in a single request. - If True, streams via chunks. - - :type headers: dict - :param headers: (Optional) Headers to be used for the ``Request``. - """ - self._ensure_initialized() - while True: - if self._initial_response is not None: - response = self._initial_response - self._initial_response = None - else: - end_byte = self._compute_end_byte(self.progress, - use_chunks=use_chunks) - response = self._get_chunk(self.progress, end_byte, - headers=headers) - if self.total_size is None: - self._set_total(response.info) - response = self._process_response(response) - if (response.status_code == http_client.OK or - self.progress >= self.total_size): - break - - -class Upload(_Transfer): - """Represent a single Upload. - - :type stream: file-like object - :param stream: stream to/from which data is downloaded/uploaded. - - :type mime_type: str: - :param mime_type: MIME type of the upload. - - :type total_size: int - :param total_size: (Optional) Total upload size for the stream. - - :type http: :class:`httplib2.Http` (or workalike) - :param http: Http instance used to perform requests. - - :type close_stream: bool - :param close_stream: should this instance close the stream when deleted - - :type auto_transfer: bool - :param auto_transfer: should this instance automatically begin transfering - data when initialized - - :type kwds: dict - :param kwds: keyword arguments: all except ``total_size`` are passed - through to :meth:`_Transfer.__init__()`. - """ - _REQUIRED_SERIALIZATION_KEYS = set(( - 'auto_transfer', 'mime_type', 'total_size', 'url')) - - def __init__(self, stream, mime_type, total_size=None, http=None, - close_stream=False, auto_transfer=True, - **kwds): - super(Upload, self).__init__( - stream, close_stream=close_stream, auto_transfer=auto_transfer, - http=http, **kwds) - self._final_response = None - self._server_chunk_granularity = None - self._complete = False - self._mime_type = mime_type - self._progress = 0 - self._strategy = None - self._total_size = total_size - - @classmethod - def from_file(cls, filename, mime_type=None, auto_transfer=True, **kwds): - """Create a new Upload object from a filename. 
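Stepping back, `Download.stream_file` above reduces to a simple pump once the bookkeeping is stripped away. A sketch of that control flow; `fetch` is a hypothetical stand-in for `_get_chunk` plus `_process_response`:

def stream_all(fetch, chunksize, total_size):
    """Pull chunks until the whole body has been written."""
    progress = 0
    while progress < total_size:
        # fetch(start, end) writes the bytes and reports how many arrived
        # (stand-in for _get_chunk + _process_response).
        length = fetch(progress, min(progress + chunksize, total_size) - 1)
        if length == 0:
            raise RuntimeError('zero bytes returned mid-download')
        progress += length
    return progress

# A fake fetch over an in-memory "file" of 10 bytes, 4-byte chunks:
data = b'0123456789'
got = []
assert stream_all(
    lambda s, e: got.append(data[s:e + 1]) or len(data[s:e + 1]),
    chunksize=4, total_size=len(data)) == 10
assert b''.join(got) == data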
- - :type filename: str - :param filename: path/filename to the file being uploaded - - :type mime_type: str - :param mime_type: MIMEtype of the file being uploaded - - :type auto_transfer: bool - :param auto_transfer: - (Optional) should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Upload` - :returns: The upload initiated from the file passed. - """ - path = os.path.expanduser(filename) - if not mime_type: - mime_type, _ = mimetypes.guess_type(path) - if mime_type is None: - raise ValueError( - 'Could not guess mime type for %s' % path) - size = os.stat(path).st_size - return cls(open(path, 'rb'), mime_type, total_size=size, - close_stream=True, auto_transfer=auto_transfer, **kwds) - - @classmethod - def from_stream(cls, stream, mime_type, - total_size=None, auto_transfer=True, **kwds): - """Create a new Upload object from a stream. - - :type stream: writable file-like object - :param stream: the target file - - :type mime_type: str - :param mime_type: MIMEtype of the file being uploaded - - :type total_size: int - :param total_size: (Optional) Size of the file being uploaded - - :type auto_transfer: bool - :param auto_transfer: - (Optional) should the transfer be started immediately - - :type kwds: dict - :param kwds: keyword arguments: passed - through to :meth:`_Transfer.__init__()`. - - :rtype: :class:`Upload` - :returns: The upload initiated from the stream passed. - """ - if mime_type is None: - raise ValueError( - 'No mime_type specified for stream') - return cls(stream, mime_type, total_size=total_size, - close_stream=False, auto_transfer=auto_transfer, **kwds) - - @property - def complete(self): - """Has the entire stream been uploaded. - - :rtype: bool - :returns: Boolean indicated if the upload is complete. - """ - return self._complete - - @property - def mime_type(self): - """MIMEtype of the file being uploaded. - - :rtype: str - :returns: The mime-type of the upload. - """ - return self._mime_type - - @property - def progress(self): - """Bytes uploaded so far - - :rtype: int - :returns: The amount uploaded so far. - """ - return self._progress - - @property - def strategy(self): - """Upload strategy to use - - :rtype: str or None - :returns: The strategy used to upload the data. - """ - return self._strategy - - @strategy.setter - def strategy(self, value): - """Update upload strategy to use - - :type value: str (one of :data:`SIMPLE_UPLOAD` or - :data:`RESUMABLE_UPLOAD`) - - :raises: :exc:`ValueError` if value is not one of the two allowed - strings. - """ - if value not in (SIMPLE_UPLOAD, RESUMABLE_UPLOAD): - raise ValueError(( - 'Invalid value "%s" for upload strategy, must be one of ' - '"simple" or "resumable".') % value) - self._strategy = value - - @property - def total_size(self): - """Total size of the stream to be uploaded. - - :rtype: int or None - :returns: The total size to be uploaded. - """ - return self._total_size - - @total_size.setter - def total_size(self, value): - """Update total size of the stream to be uploaded. 
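Constructing an upload mirrors the download case. A usage sketch against `Upload.from_file` above; the path and MIME type are illustrative, and the file is assumed to exist:

# Pre-deletion import path (assumption); '/tmp/example.png' is a
# placeholder that must exist for from_file to stat it.
from google.cloud.streaming.transfer import Upload, RESUMABLE_UPLOAD

upload = Upload.from_file('/tmp/example.png', mime_type='image/png',
                          auto_transfer=False)
upload.strategy = RESUMABLE_UPLOAD    # or let configure_request decide
assert upload.total_size is not None  # from_file stats the file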
- - :type value: int - :param value: (Optional) the size - """ - self._ensure_uninitialized() - self._total_size = value - - def __repr__(self): - if not self.initialized: - return 'Upload (uninitialized)' - else: - return 'Upload with %d/%s bytes transferred for url %s' % ( - self.progress, self.total_size or '???', self.url) - - def _set_default_strategy(self, upload_config, http_request): - """Determine and set the default upload strategy for this upload. - - We generally prefer simple or multipart, unless we're forced to - use resumable. This happens when any of (1) the upload is too - large, (2) the simple endpoint doesn't support multipart requests - and we have metadata, or (3) there is no simple upload endpoint. - - :type upload_config: instance w/ ``max_size`` and ``accept`` - attributes - :param upload_config: Configuration for the upload endpoint. - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: The associated http request. - """ - if upload_config.resumable_path is None: - self.strategy = SIMPLE_UPLOAD - if self.strategy is not None: - return - strategy = SIMPLE_UPLOAD - if (self.total_size is not None and - self.total_size > RESUMABLE_UPLOAD_THRESHOLD): - strategy = RESUMABLE_UPLOAD - if http_request.body and not upload_config.simple_multipart: - strategy = RESUMABLE_UPLOAD - if not upload_config.simple_path: - strategy = RESUMABLE_UPLOAD - self.strategy = strategy - - def configure_request(self, upload_config, http_request, url_builder): - """Configure the request and url for this upload. - - :type upload_config: instance w/ ``max_size`` and ``accept`` - attributes - :param upload_config: transfer policy object to be queried - - :type http_request: :class:`~.streaming.http_wrapper.Request` - :param http_request: the request to be updated - - :type url_builder: instance with settable 'relative_path' and - 'query_params' attributes. - :param url_builder: transfer policy object to be updated - - :raises: :exc:`ValueError` if the requested upload is too big, - or does not have an acceptable MIME type. - """ - # Validate total_size vs. max_size - if (self.total_size and upload_config.max_size and - self.total_size > upload_config.max_size): - raise ValueError( - 'Upload too big: %s larger than max size %s' % ( - self.total_size, upload_config.max_size)) - # Validate mime type - if not acceptable_mime_type(upload_config.accept, self.mime_type): - raise ValueError( - 'MIME type %s does not match any accepted MIME ranges %s' % ( - self.mime_type, upload_config.accept)) - - self._set_default_strategy(upload_config, http_request) - if self.strategy == SIMPLE_UPLOAD: - url_builder.relative_path = upload_config.simple_path - if http_request.body: - url_builder.query_params['uploadType'] = 'multipart' - self._configure_multipart_request(http_request) - else: - url_builder.query_params['uploadType'] = 'media' - self._configure_media_request(http_request) - else: - url_builder.relative_path = upload_config.resumable_path - url_builder.query_params['uploadType'] = 'resumable' - self._configure_resumable_request(http_request) - - def _configure_media_request(self, http_request): - """Helper for 'configure_request': set up simple request.""" - http_request.headers['content-type'] = self.mime_type - http_request.body = self.stream.read() - http_request.loggable_body = '' - - def _configure_multipart_request(self, http_request): - """Helper for 'configure_request': set up multipart request.""" - # This is a multipart/related upload. 
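The rules in `_set_default_strategy` above read more clearly as a bare function. A restatement, ignoring the forced-simple case when no resumable endpoint exists; `has_metadata` stands in for `http_request.body`:

RESUMABLE_UPLOAD_THRESHOLD = 5 << 20   # 5 MiB, as defined above

def pick_strategy(total_size, has_metadata, simple_path, multipart_ok):
    # Restates _set_default_strategy's three resumable triggers.
    if total_size is not None and total_size > RESUMABLE_UPLOAD_THRESHOLD:
        return 'resumable'             # too big for one shot
    if has_metadata and not multipart_ok:
        return 'resumable'             # metadata but no multipart endpoint
    if not simple_path:
        return 'resumable'             # no simple endpoint at all
    return 'simple'

assert pick_strategy(1 << 20, False, '/upload', True) == 'simple'
assert pick_strategy(6 << 20, False, '/upload', True) == 'resumable'
assert pick_strategy(1 << 20, True, '/upload', False) == 'resumable'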
- msg_root = mime_multipart.MIMEMultipart('related') - # msg_root should not write out its own headers - setattr(msg_root, '_write_headers', lambda self: None) - - # attach the body as one part - msg = mime_nonmultipart.MIMENonMultipart( - *http_request.headers['content-type'].split('/')) - msg.set_payload(http_request.body) - msg_root.attach(msg) - - # attach the media as the second part - msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/')) - msg['Content-Transfer-Encoding'] = 'binary' - msg.set_payload(self.stream.read()) - msg_root.attach(msg) - - # NOTE: generate multipart message as bytes, not text - stream = six.BytesIO() - if six.PY3: # pragma: NO COVER Python3 - generator_class = email_generator.BytesGenerator - else: - generator_class = email_generator.Generator - generator = generator_class(stream, mangle_from_=False) - generator.flatten(msg_root, unixfrom=False) - http_request.body = stream.getvalue() - - multipart_boundary = msg_root.get_boundary() - http_request.headers['content-type'] = ( - 'multipart/related; boundary="%s"' % multipart_boundary) - - boundary_bytes = _to_bytes(multipart_boundary) - body_components = http_request.body.split(boundary_bytes) - headers, _, _ = body_components[-2].partition(b'\n\n') - body_components[-2] = b'\n\n'.join([headers, b'\n\n--']) - http_request.loggable_body = boundary_bytes.join(body_components) - - def _configure_resumable_request(self, http_request): - """Helper for 'configure_request': set up resumable request.""" - http_request.headers['X-Upload-Content-Type'] = self.mime_type - if self.total_size is not None: - http_request.headers[ - 'X-Upload-Content-Length'] = str(self.total_size) - - def refresh_upload_state(self): - """Refresh the state of a resumable upload via query to the back-end. - """ - if self.strategy != RESUMABLE_UPLOAD: - return - self._ensure_initialized() - # NOTE: Per RFC 2616[1]/7231[2], a 'PUT' request is inappropriate - # here: it is intended to be used to replace the entire - # resource, not to query for a status. - # - # If the back-end doesn't provide a way to query for this state - # via a 'GET' request, somebody should be spanked. - # - # The violation is documented[3]. - # - # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html#sec9.6 - # [2] http://tools.ietf.org/html/rfc7231#section-4.3.4 - # [3] - # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#resume-upload - refresh_request = Request( - url=self.url, http_method='PUT', - headers={'Content-Range': 'bytes */*'}) - refresh_response = make_api_request( - self.http, refresh_request, redirections=0, - retries=self.num_retries) - range_header = self._get_range_header(refresh_response) - if refresh_response.status_code in (http_client.OK, - http_client.CREATED): - self._complete = True - self._progress = self.total_size - self.stream.seek(self.progress) - # If we're finished, the refresh response will contain the metadata - # originally requested. Cache it so it can be returned in - # StreamInChunks. - self._final_response = refresh_response - elif refresh_response.status_code == RESUME_INCOMPLETE: - if range_header is None: - self._progress = 0 - else: - self._progress = self._last_byte(range_header) + 1 - self.stream.seek(self.progress) - else: - raise HttpError.from_response(refresh_response) - - @staticmethod - def _get_range_header(response): - """Return a 'Range' header from a response. 
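The multipart assembly in `_configure_multipart_request` above is standard `email.mime` machinery. A runnable distillation that produces the same `multipart/related` shape, one metadata part plus one media part; the filename and payload bytes are stand-ins:

import email.generator as email_generator
import email.mime.multipart as mime_multipart
import email.mime.nonmultipart as mime_nonmultipart
import io

msg_root = mime_multipart.MIMEMultipart('related')
# Suppress the root headers; only the body is sent (same trick as above).
setattr(msg_root, '_write_headers', lambda self: None)

meta = mime_nonmultipart.MIMENonMultipart('application', 'json')
meta.set_payload('{"name": "example.png"}')   # stand-in metadata
msg_root.attach(meta)

media = mime_nonmultipart.MIMENonMultipart('image', 'png')
media['Content-Transfer-Encoding'] = 'binary'
media.set_payload(b'\x89PNG...')              # stand-in media bytes
msg_root.attach(media)

stream = io.BytesIO()
email_generator.BytesGenerator(stream, mangle_from_=False).flatten(
    msg_root, unixfrom=False)
body = stream.getvalue()
content_type = ('multipart/related; boundary="%s"'
                % msg_root.get_boundary())
assert msg_root.get_boundary().encode('ascii') in body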
-
-        :type response: :class:`google.cloud.streaming.http_wrapper.Response`
-        :param response: response to be queried
-
-        :rtype: str
-        :returns: The header used to determine the bytes range.
-        """
-        # NOTE: Per RFC 2616[1]/7233[2][3], 'Range' is a request header,
-        #       not a response header.  If the back-end is actually setting
-        #       'Range' on responses, somebody should be spanked:  it should
-        #       be sending 'Content-Range' (including the # '/' trailer).
-        #
-        # The violation is documented[4].
-        #
-        # [1] http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
-        # [2] http://tools.ietf.org/html/rfc7233#section-3.1
-        # [3] http://tools.ietf.org/html/rfc7233#section-4.2
-        # [4]
-        # https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#chunking
-        return response.info.get('Range', response.info.get('range'))
-
-    def initialize_upload(self, http_request, http):
-        """Initialize this upload from the given http_request.
-
-        :type http_request: :class:`~.streaming.http_wrapper.Request`
-        :param http_request: the request to be used
-
-        :type http: :class:`httplib2.Http` (or workalike)
-        :param http: Http instance for this request.
-
-        :raises: :exc:`ValueError` if the instance has not been configured
-                 with a strategy.
-        :rtype: :class:`~google.cloud.streaming.http_wrapper.Response`
-        :returns: The response if the upload is resumable and auto transfer
-                  is not used.
-        """
-        if self.strategy is None:
-            raise ValueError(
-                'No upload strategy set; did you call configure_request?')
-        if self.strategy != RESUMABLE_UPLOAD:
-            return
-        self._ensure_uninitialized()
-        http_response = make_api_request(http, http_request,
-                                         retries=self.num_retries)
-        if http_response.status_code != http_client.OK:
-            raise HttpError.from_response(http_response)
-
-        granularity = http_response.info.get('X-Goog-Upload-Chunk-Granularity')
-        if granularity is not None:
-            granularity = int(granularity)
-        self._server_chunk_granularity = granularity
-        url = http_response.info['location']
-        self._initialize(http, url)
-
-        # Unless the user has requested otherwise, we want to just
-        # go ahead and pump the bytes now.
-        if self.auto_transfer:
-            return self.stream_file(use_chunks=True)
-        else:
-            return http_response
-
-    @staticmethod
-    def _last_byte(range_header):
-        """Parse the last byte from a 'Range' header.
-
-        :type range_header: str
-        :param range_header: 'Range' header value per RFC 2616/7233
-
-        :rtype: int
-        :returns: The last byte from a range header.
-        """
-        _, _, end = range_header.partition('-')
-        return int(end)
-
-    def _validate_chunksize(self, chunksize=None):
-        """Validate chunksize against server-specified granularity.
-
-        Helper for :meth:`stream_file`.
-
-        :type chunksize: int
-        :param chunksize: (Optional) the chunk size to be tested.
-
-        :raises: :exc:`ValueError` if ``chunksize`` is not a multiple
-                 of the server-specified granularity.
-        """
-        if self._server_chunk_granularity is None:
-            return
-        chunksize = chunksize or self.chunksize
-        if chunksize % self._server_chunk_granularity:
-            raise ValueError(
-                'Server requires chunksize to be a multiple of %d'
-                % self._server_chunk_granularity)
-
-    def stream_file(self, use_chunks=True):
-        """Upload the stream.
-
-        :type use_chunks: bool
-        :param use_chunks: If False, send the stream in a single request.
-                           Otherwise, send it in chunks.
-
-        :rtype: :class:`google.cloud.streaming.http_wrapper.Response`
-        :returns: The response for the final request made.
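`_last_byte` above leans entirely on `str.partition`; the parse, restated and checked:

def last_byte(range_header):
    # Restates _last_byte: 'bytes=0-12345' -> 12345.
    _, _, end = range_header.partition('-')
    return int(end)

assert last_byte('bytes=0-12345') == 12345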
- """ - if self.strategy != RESUMABLE_UPLOAD: - raise ValueError( - 'Cannot stream non-resumable upload') - # final_response is set if we resumed an already-completed upload. - response = self._final_response - send_func = self._send_chunk if use_chunks else self._send_media_body - if use_chunks: - self._validate_chunksize(self.chunksize) - self._ensure_initialized() - while not self.complete: - response = send_func(self.stream.tell()) - if response.status_code in (http_client.OK, http_client.CREATED): - self._complete = True - break - self._progress = self._last_byte(response.info['range']) - if self.progress + 1 != self.stream.tell(): - raise CommunicationError( - 'Failed to transfer all bytes in chunk, upload paused at ' - 'byte %d' % self.progress) - if self.complete and hasattr(self.stream, 'seek'): - if not hasattr(self.stream, 'seekable') or self.stream.seekable(): - current_pos = self.stream.tell() - self.stream.seek(0, os.SEEK_END) - end_pos = self.stream.tell() - self.stream.seek(current_pos) - if current_pos != end_pos: - raise TransferInvalidError( - 'Upload complete with %s ' - 'additional bytes left in stream' % - (int(end_pos) - int(current_pos))) - return response - - def _send_media_request(self, request, end): - """Peform API upload request. - - Helper for _send_media_body & _send_chunk: - - :type request: :class:`google.cloud.streaming.http_wrapper.Request` - :param request: the request to upload - - :type end: int - :param end: end byte of the to be uploaded - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: the response - :raises: :exc:`~.streaming.exceptions.HttpError` if the status - code from the response indicates an error. - """ - response = make_api_request( - self.bytes_http, request, retries=self.num_retries) - if response.status_code not in (http_client.OK, http_client.CREATED, - RESUME_INCOMPLETE): - # We want to reset our state to wherever the server left us - # before this failed request, and then raise. - self.refresh_upload_state() - raise HttpError.from_response(response) - if response.status_code == RESUME_INCOMPLETE: - last_byte = self._last_byte( - self._get_range_header(response)) - if last_byte + 1 != end: - self.stream.seek(last_byte) - return response - - def _send_media_body(self, start): - """Send the entire stream in a single request. - - Helper for :meth:`stream_file`: - - :type start: int - :param start: start byte of the range. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: The response from the media upload request. - """ - self._ensure_initialized() - if self.total_size is None: - raise TransferInvalidError( - 'Total size must be known for SendMediaBody') - body_stream = StreamSlice(self.stream, self.total_size - start) - - request = Request(url=self.url, http_method='PUT', body=body_stream) - request.headers['Content-Type'] = self.mime_type - if start == self.total_size: - # End of an upload with 0 bytes left to send; just finalize. - range_string = 'bytes */%s' % self.total_size - else: - range_string = 'bytes %s-%s/%s' % (start, self.total_size - 1, - self.total_size) - - request.headers['Content-Range'] = range_string - - return self._send_media_request(request, self.total_size) - - def _send_chunk(self, start): - """Send a chunk of the stream. - - Helper for :meth:`stream_file`: - - :type start: int - :param start: start byte of the range. - - :rtype: :class:`google.cloud.streaming.http_wrapper.Response` - :returns: The response from the chunked upload request. 
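The three `Content-Range` shapes built by `_send_media_body` above and `_send_chunk` below are easy to mix up; a condensed restatement with concrete values (one hypothetical helper standing in for both code paths):

def content_range(start, end, total):
    """Build the Content-Range for one resumable-upload request."""
    if total is not None and start == total:
        return 'bytes */%s' % total            # nothing left: finalize only
    if total is None:
        return 'bytes %s-%s/*' % (start, end)  # streaming, size unknown
    return 'bytes %s-%s/%s' % (start, end, total)

assert content_range(0, 99, 1000) == 'bytes 0-99/1000'    # normal chunk
assert content_range(1000, None, 1000) == 'bytes */1000'  # finalize
assert content_range(0, 99, None) == 'bytes 0-99/*'       # unknown size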
- """ - self._ensure_initialized() - no_log_body = self.total_size is None - if self.total_size is None: - # For the streaming resumable case, we need to detect when - # we're at the end of the stream. - body_stream = BufferedStream( - self.stream, start, self.chunksize) - end = body_stream.stream_end_position - if body_stream.stream_exhausted: - self._total_size = end - # Here, change body_stream from a stream to a string object, - # which means reading a chunk into memory. This works around - # https://code.google.com/p/httplib2/issues/detail?id=176 which can - # cause httplib2 to skip bytes on 401's for file objects. - body_stream = body_stream.read(self.chunksize) - else: - end = min(start + self.chunksize, self.total_size) - body_stream = StreamSlice(self.stream, end - start) - request = Request(url=self.url, http_method='PUT', body=body_stream) - request.headers['Content-Type'] = self.mime_type - if no_log_body: - # Disable logging of streaming body. - request.loggable_body = '' - if self.total_size is None: - # Streaming resumable upload case, unknown total size. - range_string = 'bytes %s-%s/*' % (start, end - 1) - elif end == start: - # End of an upload with 0 bytes left to send; just finalize. - range_string = 'bytes */%s' % self.total_size - else: - # Normal resumable upload case with known sizes. - range_string = 'bytes %s-%s/%s' % (start, end - 1, self.total_size) - - request.headers['Content-Range'] = range_string - - return self._send_media_request(request, end) diff --git a/core/google/cloud/streaming/util.py b/core/google/cloud/streaming/util.py deleted file mode 100644 index e896052f8a1c..000000000000 --- a/core/google/cloud/streaming/util.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Assorted utilities shared between parts of apitools.""" - -import random - - -_MAX_RETRY_WAIT = 60 - - -def calculate_wait_for_retry(retry_attempt): - """Calculate the amount of time to wait before a retry attempt. - - Wait time grows exponentially with the number of attempts. A - random amount of jitter is added to spread out retry attempts from - different clients. - - :type retry_attempt: int - :param retry_attempt: Retry attempt counter. - - :rtype: int - :returns: Number of seconds to wait before retrying request. - """ - wait_time = 2 ** retry_attempt - max_jitter = wait_time / 4.0 - wait_time += random.uniform(-max_jitter, max_jitter) - return max(1, min(wait_time, _MAX_RETRY_WAIT)) - - -def acceptable_mime_type(accept_patterns, mime_type): - """Check that ``mime_type`` matches one of ``accept_patterns``. - - Note that this function assumes that all patterns in accept_patterns - will be simple types of the form "type/subtype", where one or both - of these can be "*". We do not support parameters (i.e. "; q=") in - patterns. - - :type accept_patterns: list of string - :param accept_patterns: acceptable MIME types. 
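`calculate_wait_for_retry` above grows exponentially but is jittered and clamped; its envelope for the first few attempts, as a usage check (assuming the pre-deletion import path):

# Pre-deletion import path (assumption).
from google.cloud.streaming.util import calculate_wait_for_retry

for attempt in range(1, 7):
    wait = calculate_wait_for_retry(attempt)
    base = 2 ** attempt
    # Jitter is +/- 25% of the base, then clamped to [1, 60] seconds.
    assert 1 <= wait <= 60
    assert wait >= min(max(1, base * 0.75), 60)
    assert wait <= min(base * 1.25, 60)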
- - :type mime_type: str - :param mime_type: the MIME being checked - - :rtype: bool - :returns: True if the supplied MIME type matches at least one of the - patterns, else False. - """ - if '/' not in mime_type: - raise ValueError( - 'Invalid MIME type: "%s"' % mime_type) - unsupported_patterns = [p for p in accept_patterns if ';' in p] - if unsupported_patterns: - raise ValueError( - 'MIME patterns with parameter unsupported: "%s"' % ', '.join( - unsupported_patterns)) - - def _match(pattern, mime_type): - """Return True iff mime_type is acceptable for pattern.""" - return all(accept in ('*', provided) for accept, provided - in zip(pattern.split('/'), mime_type.split('/'))) - - return any(_match(pattern, mime_type) for pattern in accept_patterns) diff --git a/core/tests/unit/streaming/__init__.py b/core/tests/unit/streaming/__init__.py deleted file mode 100644 index 58e0d9153632..000000000000 --- a/core/tests/unit/streaming/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/core/tests/unit/streaming/test_buffered_stream.py b/core/tests/unit/streaming/test_buffered_stream.py deleted file mode 100644 index 797ceea2d280..000000000000 --- a/core/tests/unit/streaming/test_buffered_stream.py +++ /dev/null @@ -1,141 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
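A few calls pin down the wildcard semantics of `acceptable_mime_type` above (again assuming the pre-deletion import path):

# Pre-deletion import path (assumption).
from google.cloud.streaming.util import acceptable_mime_type

assert acceptable_mime_type(['*/*'], 'image/png')
assert acceptable_mime_type(['image/*'], 'image/png')
assert not acceptable_mime_type(['image/*'], 'text/plain')
# Patterns carrying parameters are rejected outright:
try:
    acceptable_mime_type(['image/png; q=0.5'], 'image/png')
except ValueError:
    pass
else:
    raise AssertionError('expected ValueError')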
- -import unittest - - -class Test_BufferedStream(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.streaming.buffered_stream import BufferedStream - - return BufferedStream - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_closed_stream(self): - class _Stream(object): - closed = True - - start = 0 - bufsize = 4 - bufstream = self._make_one(_Stream, start, bufsize) - self.assertIs(bufstream._stream, _Stream) - self.assertEqual(bufstream._start_pos, start) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, b'') - self.assertTrue(bufstream._stream_at_end) - self.assertEqual(bufstream._end_pos, 0) - - def test_ctor_start_zero_longer_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertIs(bufstream._stream, stream) - self.assertEqual(bufstream._start_pos, START) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, CONTENT[:BUFSIZE]) - self.assertEqual(len(bufstream), BUFSIZE) - self.assertFalse(bufstream.stream_exhausted) - self.assertEqual(bufstream.stream_end_position, BUFSIZE) - - def test_ctor_start_nonzero_shorter_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 8 - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertIs(bufstream._stream, stream) - self.assertEqual(bufstream._start_pos, START) - self.assertEqual(bufstream._buffer_pos, 0) - self.assertEqual(bufstream._buffered_data, CONTENT[START:]) - self.assertEqual(len(bufstream), len(CONTENT) - START) - self.assertTrue(bufstream.stream_exhausted) - self.assertEqual(bufstream.stream_end_position, len(CONTENT)) - - def test__bytes_remaining_start_zero_longer_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream._bytes_remaining, BUFSIZE) - - def test__bytes_remaining_start_zero_shorter_than_buffer(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 8 - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream._bytes_remaining, len(CONTENT) - START) - - def test_read_w_none(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - with self.assertRaises(ValueError): - bufstream.read(None) - - def test_read_w_negative_size(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - with self.assertRaises(ValueError): - bufstream.read(-2) - - def test_read_from_start(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = 0 - BUFSIZE = 4 - stream = BytesIO(CONTENT) - bufstream = self._make_one(stream, START, BUFSIZE) - self.assertEqual(bufstream.read(4), CONTENT[:4]) - - def test_read_exhausted(self): - from io import BytesIO - - CONTENT = b'CONTENT GOES HERE' - START = len(CONTENT) - BUFSIZE = 10 - stream = BytesIO(CONTENT) - stream.read(START) # already consumed - 
-        bufstream = self._make_one(stream, START, BUFSIZE)
-        self.assertTrue(bufstream.stream_exhausted)
-        self.assertEqual(bufstream.stream_end_position, len(CONTENT))
-        self.assertEqual(bufstream._bytes_remaining, 0)
-        self.assertEqual(bufstream.read(10), b'')
diff --git a/core/tests/unit/streaming/test_exceptions.py b/core/tests/unit/streaming/test_exceptions.py
deleted file mode 100644
index b31c562c8e9d..000000000000
--- a/core/tests/unit/streaming/test_exceptions.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 2016 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-
-class Test_HttpError(unittest.TestCase):
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.exceptions import HttpError
-
-        return HttpError
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor(self):
-        RESPONSE = {'status': '404'}
-        CONTENT = b'CONTENT'
-        URL = 'http://www.example.com'
-        exception = self._make_one(RESPONSE, CONTENT, URL)
-        self.assertEqual(exception.response, RESPONSE)
-        self.assertEqual(exception.content, CONTENT)
-        self.assertEqual(exception.url, URL)
-        self.assertEqual(exception.status_code, 404)
-        self.assertEqual(
-            str(exception),
-            "HttpError accessing <http://www.example.com>: "
-            "response: <{'status': '404'}>, content <CONTENT>")
-
-    def test_from_response(self):
-        RESPONSE = {'status': '404'}
-        CONTENT = b'CONTENT'
-        URL = 'http://www.example.com'
-
-        class _Response(object):
-            info = RESPONSE
-            content = CONTENT
-            request_url = URL
-
-        klass = self._get_target_class()
-        exception = klass.from_response(_Response())
-        self.assertIsInstance(exception, klass)
-        self.assertEqual(exception.response, RESPONSE)
-        self.assertEqual(exception.content, CONTENT)
-        self.assertEqual(exception.url, URL)
-
-
-class Test_RetryAfterError(unittest.TestCase):
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.exceptions import RetryAfterError
-
-        return RetryAfterError
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor(self):
-        RESPONSE = {'status': '404'}
-        CONTENT = b'CONTENT'
-        URL = 'http://www.example.com'
-        RETRY_AFTER = 60
-        exception = self._make_one(RESPONSE, CONTENT, URL, RETRY_AFTER)
-        self.assertEqual(exception.response, RESPONSE)
-        self.assertEqual(exception.content, CONTENT)
-        self.assertEqual(exception.url, URL)
-        self.assertEqual(exception.retry_after, RETRY_AFTER)
-        self.assertEqual(
-            str(exception),
-            "HttpError accessing <http://www.example.com>: "
-            "response: <{'status': '404'}>, content <CONTENT>")
-
-    def test_from_response(self):
-        RESPONSE = {'status': '404'}
-        CONTENT = b'CONTENT'
-        URL = 'http://www.example.com'
-        RETRY_AFTER = 60
-
-        class _Response(object):
-            info = RESPONSE
-            content = CONTENT
-            request_url = URL
-            retry_after = RETRY_AFTER
-
-        klass = self._get_target_class()
-        exception = klass.from_response(_Response())
-        self.assertIsInstance(exception, klass)
-        self.assertEqual(exception.response, RESPONSE)
-        self.assertEqual(exception.content, CONTENT)
-        self.assertEqual(exception.url, URL)
-        self.assertEqual(exception.retry_after, RETRY_AFTER)
diff --git a/core/tests/unit/streaming/test_http_wrapper.py b/core/tests/unit/streaming/test_http_wrapper.py
deleted file mode 100644
index b0d3156ba42f..000000000000
--- a/core/tests/unit/streaming/test_http_wrapper.py
+++ /dev/null
@@ -1,498 +0,0 @@
-# Copyright 2016 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-
-class Test__httplib2_debug_level(unittest.TestCase):
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.http_wrapper import _httplib2_debug_level
-
-        return _httplib2_debug_level
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_wo_loggable_body_wo_http(self):
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import http_wrapper as MUT
-
-        request = _Request()
-        LEVEL = 1
-        _httplib2 = _Dummy(debuglevel=0)
-        with _Monkey(MUT, httplib2=_httplib2):
-            with self._make_one(request, LEVEL):
-                self.assertEqual(_httplib2.debuglevel, 0)
-
-    def test_w_loggable_body_wo_http(self):
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import http_wrapper as MUT
-
-        request = _Request(loggable_body=object())
-        LEVEL = 1
-        _httplib2 = _Dummy(debuglevel=0)
-        with _Monkey(MUT, httplib2=_httplib2):
-            with self._make_one(request, LEVEL):
-                self.assertEqual(_httplib2.debuglevel, LEVEL)
-            self.assertEqual(_httplib2.debuglevel, 0)
-
-    def test_w_loggable_body_w_http(self):
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import http_wrapper as MUT
-
-        class _Connection(object):
-            debuglevel = 0
-
-            def set_debuglevel(self, value):
-                self.debuglevel = value
-
-        request = _Request(loggable_body=object())
-        LEVEL = 1
-        _httplib2 = _Dummy(debuglevel=0)
-        update_me = _Connection()
-        skip_me = _Connection()
-        connections = {'update:me': update_me, 'skip_me': skip_me}
-        _http = _Dummy(connections=connections)
-        with _Monkey(MUT, httplib2=_httplib2):
-            with self._make_one(request, LEVEL, _http):
-                self.assertEqual(_httplib2.debuglevel, LEVEL)
-                self.assertEqual(update_me.debuglevel, LEVEL)
-                self.assertEqual(skip_me.debuglevel, 0)
-            self.assertEqual(_httplib2.debuglevel, 0)
-            self.assertEqual(update_me.debuglevel, 0)
-            self.assertEqual(skip_me.debuglevel, 0)
-
-
-class Test_Request(unittest.TestCase):
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.http_wrapper import Request
-
-        return Request
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor_defaults(self):
-        request = self._make_one()
-        self.assertEqual(request.url, '')
-        self.assertEqual(request.http_method, 'GET')
-        self.assertEqual(request.headers, {'content-length': '0'})
-        self.assertEqual(request.body, '')
-        self.assertIsNone(request.loggable_body)
-
-    def test_loggable_body_setter_w_body_None(self):
-        from google.cloud.streaming.exceptions import RequestError
-
-        request = self._make_one(body=None)
-        with self.assertRaises(RequestError):
-            request.loggable_body = 'abc'
-
-    def test_body_setter_w_None(self):
-        request = self._make_one()
-        request.loggable_body = 'abc'
-        request.body = None
-        self.assertEqual(request.headers, {})
-        self.assertIsNone(request.body)
-        self.assertEqual(request.loggable_body, 'abc')
-
-    def test_body_setter_w_non_string(self):
-        request = self._make_one()
-        request.loggable_body = 'abc'
-        request.body = body = _Dummy(length=123)
-        self.assertEqual(request.headers, {'content-length': '123'})
-        self.assertIs(request.body, body)
-        self.assertEqual(request.loggable_body, '<media body>')
-
-
-class Test_Response(unittest.TestCase):
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.http_wrapper import Response
-
-        return Response
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor(self):
-        CONTENT = 'CONTENT'
-        URL = 'http://example.com/api'
-        info = {'status': '200'}
-        response = self._make_one(info, CONTENT, URL)
-        self.assertEqual(len(response), len(CONTENT))
-        self.assertEqual(response.status_code, 200)
-        self.assertIsNone(response.retry_after)
-        self.assertFalse(response.is_redirect)
-
-    def test_length_w_content_encoding_w_content_range(self):
-        CONTENT = 'CONTENT'
-        URL = 'http://example.com/api'
-        RANGE = 'bytes 0-122/5678'
-        info = {
-            'status': '200',
-            'content-length': len(CONTENT),
-            'content-encoding': 'testing',
-            'content-range': RANGE,
-        }
-        response = self._make_one(info, CONTENT, URL)
-        self.assertEqual(len(response), 123)
-
-    def test_length_w_content_encoding_wo_content_range(self):
-        CONTENT = 'CONTENT'
-        URL = 'http://example.com/api'
-        info = {
-            'status': '200',
-            'content-length': len(CONTENT),
-            'content-encoding': 'testing',
-        }
-        response = self._make_one(info, CONTENT, URL)
-        self.assertEqual(len(response), len(CONTENT))
-
-    def test_length_w_content_length_w_content_range(self):
-        CONTENT = 'CONTENT'
-        URL = 'http://example.com/api'
-        RANGE = 'bytes 0-12/5678'
-        info = {
-            'status': '200',
-            'content-length': len(CONTENT) * 2,
-            'content-range': RANGE,
-        }
-        response = self._make_one(info, CONTENT, URL)
-        self.assertEqual(len(response), len(CONTENT) * 2)
-
-    def test_length_wo_content_length_w_content_range(self):
-        CONTENT = 'CONTENT'
-        URL = 'http://example.com/api'
-        RANGE = 'bytes 0-122/5678'
-        info = {
-            'status': '200',
-            'content-range': RANGE,
-        }
-        response = self._make_one(info, CONTENT, URL)
-        self.assertEqual(len(response), 123)
-
-    def test_retry_after_w_header(self):
-        CONTENT = 'CONTENT'
-        URL = 'http://example.com/api'
-        info = {
-            'status': '200',
-            'retry-after': '123',
-        }
-        response = self._make_one(info, CONTENT, URL)
-        self.assertEqual(response.retry_after, 123)
-
-    def test_is_redirect_w_code_wo_location(self):
-        CONTENT = 'CONTENT'
-        URL = 'http://example.com/api'
-        info = {
-            'status': '301',
-        }
-        response = self._make_one(info, CONTENT, URL)
-        self.assertFalse(response.is_redirect)
-
-    def test_is_redirect_w_code_w_location(self):
-        CONTENT = 'CONTENT'
-        URL = 'http://example.com/api'
-        info = {
-            'status': '301',
-            'location': 'http://example.com/other',
-        }
-        response = self._make_one(info, CONTENT, URL)
-        self.assertTrue(response.is_redirect)
-
-
-class Test__check_response(unittest.TestCase):
-
-    def _call_fut(self, *args, **kw):
-        from google.cloud.streaming.http_wrapper import _check_response
-
-        return _check_response(*args, **kw)
-
-    def test_w_none(self):
-        from google.cloud.streaming.exceptions import RequestError
-
-        with self.assertRaises(RequestError):
-            self._call_fut(None)
-
-    def test_w_TOO_MANY_REQUESTS(self):
-        from google.cloud.streaming.exceptions import BadStatusCodeError
-        from google.cloud.streaming.http_wrapper import TOO_MANY_REQUESTS
-
-        with self.assertRaises(BadStatusCodeError):
-            self._call_fut(_Response(TOO_MANY_REQUESTS))
-
-    def test_w_50x(self):
-        from google.cloud.streaming.exceptions import BadStatusCodeError
-
-        with self.assertRaises(BadStatusCodeError):
-            self._call_fut(_Response(500))
-
-        with self.assertRaises(BadStatusCodeError):
-            self._call_fut(_Response(503))
-
-    def test_w_retry_after(self):
-        from google.cloud.streaming.exceptions import RetryAfterError
-
-        with self.assertRaises(RetryAfterError):
-            self._call_fut(_Response(200, 20))
-
-    def test_pass(self):
-        self._call_fut(_Response(200))
-
-
-class Test__reset_http_connections(unittest.TestCase):
-
-    def _call_fut(self, *args, **kw):
-        from google.cloud.streaming.http_wrapper import _reset_http_connections
-
-        return _reset_http_connections(*args, **kw)
-
-    def test_wo_connections(self):
-        http = object()
-        self._call_fut(http)
-
-    def test_w_connections(self):
-        connections = {'delete:me': object(), 'skip_me': object()}
-        http = _Dummy(connections=connections)
-        self._call_fut(http)
-        self.assertFalse('delete:me' in connections)
-        self.assertTrue('skip_me' in connections)
-
-
-class Test___make_api_request_no_retry(unittest.TestCase):
-
-    def _call_fut(self, *args, **kw):
-        from google.cloud.streaming.http_wrapper import (
-            _make_api_request_no_retry)
-        return _make_api_request_no_retry(*args, **kw)
-
-    def _verify_requested(self, http, request,
-                          redirections=5, connection_type=None):
-        self.assertEqual(len(http._requested), 1)
-        url, kw = http._requested[0]
-        self.assertEqual(url, request.url)
-        self.assertEqual(kw['method'], request.http_method)
-        self.assertEqual(kw['body'], request.body)
-        self.assertEqual(kw['headers'], request.headers)
-        self.assertEqual(kw['redirections'], redirections)
-        self.assertEqual(kw['connection_type'], connection_type)
-
-    def test_defaults_wo_connections(self):
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import http_wrapper as MUT
-
-        INFO = {'status': '200'}
-        CONTENT = 'CONTENT'
-        _http = _Http((INFO, CONTENT))
-        _httplib2 = _Dummy(debuglevel=1)
-        _request = _Request()
-        _checked = []
-        with _Monkey(MUT, httplib2=_httplib2,
-                     _check_response=_checked.append):
-            response = self._call_fut(_http, _request)
-
-        self.assertIsInstance(response, MUT.Response)
-        self.assertEqual(response.info, INFO)
-        self.assertEqual(response.content, CONTENT)
-        self.assertEqual(response.request_url, _request.url)
-        self.assertEqual(_checked, [response])
-        self._verify_requested(_http, _request)
-
-    def test_w_http_connections_miss(self):
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import http_wrapper as MUT
-
-        INFO = {'status': '200'}
-        CONTENT = 'CONTENT'
-        CONN_TYPE = object()
-        _http = _Http((INFO, CONTENT))
-        _http.connections = {'https': CONN_TYPE}
-        _httplib2 = _Dummy(debuglevel=1)
-        _request = _Request()
-        _checked = []
-        with _Monkey(MUT, httplib2=_httplib2,
-                     _check_response=_checked.append):
-            response = self._call_fut(_http, _request)
-
-        self.assertIsInstance(response, MUT.Response)
-        self.assertEqual(response.info, INFO)
-        self.assertEqual(response.content, CONTENT)
-        self.assertEqual(response.request_url, _request.url)
-        self.assertEqual(_checked, [response])
-        self._verify_requested(_http, _request)
-
-    def test_w_http_connections_hit(self):
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import http_wrapper as MUT
-
-        INFO = {'status': '200'}
-        CONTENT = 'CONTENT'
-        CONN_TYPE = object()
-        _http = _Http((INFO, CONTENT))
-        _http.connections = {'http': CONN_TYPE}
-        _httplib2 = _Dummy(debuglevel=1)
-        _request = _Request()
-        _checked = []
-        with _Monkey(MUT, httplib2=_httplib2,
-                     _check_response=_checked.append):
-            response = self._call_fut(_http, _request)
-
-        self.assertIsInstance(response, MUT.Response)
-        self.assertEqual(response.info, INFO)
-        self.assertEqual(response.content, CONTENT)
-        self.assertEqual(response.request_url, _request.url)
-        self.assertEqual(_checked, [response])
-        self._verify_requested(_http, _request, connection_type=CONN_TYPE)
-
-    def test_w_request_returning_None(self):
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import http_wrapper as MUT
-        from google.cloud.streaming.exceptions import RequestError
-
-        INFO = None
-        CONTENT = None
-        CONN_TYPE = object()
-        _http = _Http((INFO, CONTENT))
-        _http.connections = {'http': CONN_TYPE}
-        _httplib2 = _Dummy(debuglevel=1)
-        _request = _Request()
-        with _Monkey(MUT, httplib2=_httplib2):
-            with self.assertRaises(RequestError):
-                self._call_fut(_http, _request)
-        self._verify_requested(_http, _request, connection_type=CONN_TYPE)
-
-
-class Test_make_api_request(unittest.TestCase):
-
-    def _call_fut(self, *args, **kw):
-        from google.cloud.streaming.http_wrapper import make_api_request
-
-        return make_api_request(*args, **kw)
-
-    def test_wo_exception(self):
-        from google.cloud.streaming import http_wrapper as MUT
-        from google.cloud._testing import _Monkey
-
-        HTTP, REQUEST, RESPONSE = object(), object(), object()
-        _created, _checked = [], []
-
-        def _wo_exception(*args, **kw):
-            _created.append((args, kw))
-            return RESPONSE
-
-        with _Monkey(MUT, _make_api_request_no_retry=_wo_exception,
-                     _check_response=_checked.append):
-            response = self._call_fut(HTTP, REQUEST)
-
-        self.assertIs(response, RESPONSE)
-        expected_kw = {'redirections': MUT._REDIRECTIONS}
-        self.assertEqual(_created, [((HTTP, REQUEST), expected_kw)])
-        self.assertEqual(_checked, [])  # not called by '_wo_exception'
-
-    def test_w_exceptions_lt_max_retries(self):
-        from google.cloud.streaming.exceptions import RetryAfterError
-        from google.cloud.streaming import http_wrapper as MUT
-        from google.cloud._testing import _Monkey
-
-        HTTP, RESPONSE = object(), object()
-        REQUEST = _Request()
-        _created, _checked = [], []
-        _counter = [None] * 4
-
-        def _wo_exception(*args, **kw):
-            _created.append((args, kw))
-            if _counter:
-                _counter.pop()
-                raise RetryAfterError(RESPONSE, '', REQUEST.url, 0.1)
-            return RESPONSE
-
-        with _Monkey(MUT, _make_api_request_no_retry=_wo_exception,
-                     _check_response=_checked.append):
-            response = self._call_fut(HTTP, REQUEST, retries=5)
-
-        self.assertIs(response, RESPONSE)
-        self.assertEqual(len(_created), 5)
-        expected_kw = {'redirections': MUT._REDIRECTIONS}
-        for attempt in _created:
-            self.assertEqual(attempt, ((HTTP, REQUEST), expected_kw))
-        self.assertEqual(_checked, [])  # not called by '_wo_exception'
-
-    def test_w_exceptions_gt_max_retries(self):
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import http_wrapper as MUT
-
-        HTTP = object()
-        REQUEST = _Request()
-        _created, _checked = [], []
-
-        def _wo_exception(*args, **kw):
-            _created.append((args, kw))
-            raise ValueError('Retryable')
-
-        with _Monkey(MUT, calculate_wait_for_retry=lambda *ignored: 0.1,
-                     _make_api_request_no_retry=_wo_exception,
-                     _check_response=_checked.append):
-            with self.assertRaises(ValueError):
-                self._call_fut(HTTP, REQUEST, retries=3)
-
-        self.assertEqual(len(_created), 3)
-        expected_kw = {'redirections': MUT._REDIRECTIONS}
-        for attempt in _created:
-            self.assertEqual(attempt, ((HTTP, REQUEST), expected_kw))
-        self.assertEqual(_checked, [])  # not called by '_wo_exception'
-
-
-class _Dummy(object):
-    def __init__(self, **kw):
-        self.__dict__.update(kw)
-
-
-class _Request(object):
-    __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body',)
-    URL = 'http://example.com/api'
-
-    def __init__(self, url=URL, http_method='GET', body='',
-                 loggable_body=None):
-        self.url = url
-        self.http_method = http_method
-        self.body = body
-        self.headers = {}
-        self.loggable_body = loggable_body
-
-
-class _Response(object):
-    content = ''
-    request_url = _Request.URL
-
-    def __init__(self, status_code, retry_after=None):
-        self.info = {'status': status_code}
-        self.status_code = status_code
-        self.retry_after = retry_after
-
-
-class _Http(object):
-
-    def __init__(self, *responses):
-        self._responses = responses
-        self._requested = []
-
-    def request(self, url, **kw):
-        self._requested.append((url, kw))
-        response, self._responses = self._responses[0], self._responses[1:]
-        return response
diff --git a/core/tests/unit/streaming/test_stream_slice.py b/core/tests/unit/streaming/test_stream_slice.py
deleted file mode 100644
index 47820078447d..000000000000
--- a/core/tests/unit/streaming/test_stream_slice.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# Copyright 2016 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-
-class Test_StreamSlice(unittest.TestCase):
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.stream_slice import StreamSlice
-
-        return StreamSlice
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor(self):
-        from io import BytesIO
-
-        CONTENT = b'CONTENT GOES HERE'
-        MAXSIZE = 4
-        stream = BytesIO(CONTENT)
-        stream_slice = self._make_one(stream, MAXSIZE)
-        self.assertIs(stream_slice._stream, stream)
-        self.assertEqual(stream_slice._remaining_bytes, MAXSIZE)
-        self.assertEqual(stream_slice._max_bytes, MAXSIZE)
-        self.assertEqual(len(stream_slice), MAXSIZE)
-        self.assertEqual(stream_slice.length, MAXSIZE)
-
-    def test___nonzero___empty(self):
-        from io import BytesIO
-
-        CONTENT = b''
-        MAXSIZE = 0
-        stream = BytesIO(CONTENT)
-        stream_slice = self._make_one(stream, MAXSIZE)
-        self.assertFalse(stream_slice)
-
-    def test___nonzero___nonempty(self):
-        from io import BytesIO
-
-        CONTENT = b'CONTENT GOES HERE'
-        MAXSIZE = 4
-        stream = BytesIO(CONTENT)
-        stream_slice = self._make_one(stream, MAXSIZE)
-        self.assertTrue(stream_slice)
-
-    def test_read_exhausted(self):
-        from io import BytesIO
-        from six.moves import http_client
-
-        CONTENT = b''
-        MAXSIZE = 4
-        stream = BytesIO(CONTENT)
-        stream_slice = self._make_one(stream, MAXSIZE)
-        with self.assertRaises(http_client.IncompleteRead):
-            stream_slice.read()
-
-    def test_read_implicit_size(self):
-        from io import BytesIO
-
-        CONTENT = b'CONTENT GOES HERE'
-        MAXSIZE = 4
-        stream = BytesIO(CONTENT)
-        stream_slice = self._make_one(stream, MAXSIZE)
-        self.assertEqual(stream_slice.read(), CONTENT[:MAXSIZE])
-        self.assertEqual(stream_slice._remaining_bytes, 0)
-
-    def test_read_explicit_size(self):
-        from io import BytesIO
-
-        CONTENT = b'CONTENT GOES HERE'
-        MAXSIZE = 4
-        SIZE = 3
-        stream = BytesIO(CONTENT)
-        stream_slice = self._make_one(stream, MAXSIZE)
-        self.assertEqual(stream_slice.read(SIZE), CONTENT[:SIZE])
-        self.assertEqual(stream_slice._remaining_bytes, MAXSIZE - SIZE)
diff --git a/core/tests/unit/streaming/test_transfer.py b/core/tests/unit/streaming/test_transfer.py
deleted file mode 100644
index 8bafd4a1cc47..000000000000
--- a/core/tests/unit/streaming/test_transfer.py
+++ /dev/null
@@ -1,2035 +0,0 @@
-# Copyright 2016 Google Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import unittest
-
-
-class Test__Transfer(unittest.TestCase):
-    URL = 'http://example.com/api'
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.transfer import _Transfer
-
-        return _Transfer
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor_defaults(self):
-        from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE
-
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        self.assertIs(xfer.stream, stream)
-        self.assertFalse(xfer.close_stream)
-        self.assertEqual(xfer.chunksize, _DEFAULT_CHUNKSIZE)
-        self.assertTrue(xfer.auto_transfer)
-        self.assertIsNone(xfer.bytes_http)
-        self.assertIsNone(xfer.http)
-        self.assertEqual(xfer.num_retries, 5)
-        self.assertIsNone(xfer.url)
-        self.assertFalse(xfer.initialized)
-
-    def test_ctor_explicit(self):
-        stream = _Stream()
-        HTTP = object()
-        CHUNK_SIZE = 1 << 18
-        NUM_RETRIES = 8
-        xfer = self._make_one(stream,
-                              close_stream=True,
-                              chunksize=CHUNK_SIZE,
-                              auto_transfer=False,
-                              http=HTTP,
-                              num_retries=NUM_RETRIES)
-        self.assertIs(xfer.stream, stream)
-        self.assertTrue(xfer.close_stream)
-        self.assertEqual(xfer.chunksize, CHUNK_SIZE)
-        self.assertFalse(xfer.auto_transfer)
-        self.assertIs(xfer.bytes_http, HTTP)
-        self.assertIs(xfer.http, HTTP)
-        self.assertEqual(xfer.num_retries, NUM_RETRIES)
-
-    def test_bytes_http_fallback_to_http(self):
-        stream = _Stream()
-        HTTP = object()
-        xfer = self._make_one(stream, http=HTTP)
-        self.assertIs(xfer.bytes_http, HTTP)
-
-    def test_bytes_http_setter(self):
-        stream = _Stream()
-        HTTP = object()
-        BYTES_HTTP = object()
-        xfer = self._make_one(stream, http=HTTP)
-        xfer.bytes_http = BYTES_HTTP
-        self.assertIs(xfer.bytes_http, BYTES_HTTP)
-
-    def test_num_retries_setter_invalid(self):
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        with self.assertRaises(ValueError):
-            xfer.num_retries = object()
-
-    def test_num_retries_setter_negative(self):
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        with self.assertRaises(ValueError):
-            xfer.num_retries = -1
-
-    def test__initialize_not_already_initialized_w_http(self):
-        HTTP = object()
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        xfer._initialize(HTTP, self.URL)
-        self.assertTrue(xfer.initialized)
-        self.assertIs(xfer.http, HTTP)
-        self.assertIs(xfer.url, self.URL)
-
-    def test__initialize_not_already_initialized_wo_http(self):
-        from httplib2 import Http
-
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        xfer._initialize(None, self.URL)
-        self.assertTrue(xfer.initialized)
-        self.assertIsInstance(xfer.http, Http)
-        self.assertIs(xfer.url, self.URL)
-
-    def test__initialize_w_existing_http(self):
-        HTTP_1, HTTP_2 = object(), object()
-        stream = _Stream()
-        xfer = self._make_one(stream, http=HTTP_1)
-        xfer._initialize(HTTP_2, self.URL)
-        self.assertTrue(xfer.initialized)
-        self.assertIs(xfer.http, HTTP_1)
-        self.assertIs(xfer.url, self.URL)
-
-    def test__initialize_already_initialized(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        URL_2 = 'http://example.com/other'
-        HTTP_1, HTTP_2 = object(), object()
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        xfer._initialize(HTTP_1, self.URL)
-        with self.assertRaises(TransferInvalidError):
-            xfer._initialize(HTTP_2, URL_2)
-
-    def test__ensure_initialized_hit(self):
-        HTTP = object()
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        xfer._initialize(HTTP, self.URL)
-        xfer._ensure_initialized()  # no raise
-
-    def test__ensure_initialized_miss(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        with self.assertRaises(TransferInvalidError):
-            xfer._ensure_initialized()
-
-    def test__ensure_uninitialized_hit(self):
-        stream = _Stream()
-        xfer = self._make_one(stream)
-        xfer._ensure_uninitialized()  # no raise
-
-    def test__ensure_uninitialized_miss(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        stream = _Stream()
-        HTTP = object()
-        xfer = self._make_one(stream)
-        xfer._initialize(HTTP, self.URL)
-        with self.assertRaises(TransferInvalidError):
-            xfer._ensure_uninitialized()
-
-    def test___del___closes_stream(self):
-
-        stream = _Stream()
-        xfer = self._make_one(stream, close_stream=True)
-
-        self.assertFalse(stream._closed)
-        del xfer
-        self.assertTrue(stream._closed)
-
-
-class Test_Download(unittest.TestCase):
-    URL = "http://example.com/api"
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.transfer import Download
-
-        return Download
-
-    def _make_one(self, *args, **kw):
-        return self._get_target_class()(*args, **kw)
-
-    def test_ctor_defaults(self):
-        stream = _Stream()
-        download = self._make_one(stream)
-        self.assertIs(download.stream, stream)
-        self.assertIsNone(download._initial_response)
-        self.assertEqual(download.progress, 0)
-        self.assertIsNone(download.total_size)
-        self.assertIsNone(download.encoding)
-
-    def test_ctor_w_kwds(self):
-        stream = _Stream()
-        CHUNK_SIZE = 123
-        download = self._make_one(stream, chunksize=CHUNK_SIZE)
-        self.assertIs(download.stream, stream)
-        self.assertEqual(download.chunksize, CHUNK_SIZE)
-
-    def test_ctor_w_total_size(self):
-        stream = _Stream()
-        SIZE = 123
-        download = self._make_one(stream, total_size=SIZE)
-        self.assertIs(download.stream, stream)
-        self.assertEqual(download.total_size, SIZE)
-
-    def test_from_file_w_existing_file_no_override(self):
-        import os
-        from google.cloud._testing import _tempdir
-
-        klass = self._get_target_class()
-        with _tempdir() as tempdir:
-            filename = os.path.join(tempdir, 'file.out')
-            with open(filename, 'w') as fileobj:
-                fileobj.write('EXISTING FILE')
-            with self.assertRaises(ValueError):
-                klass.from_file(filename)
-
-    def test_from_file_w_existing_file_w_override_wo_auto_transfer(self):
-        import os
-        from google.cloud._testing import _tempdir
-
-        klass = self._get_target_class()
-        with _tempdir() as tempdir:
-            filename = os.path.join(tempdir, 'file.out')
-            with open(filename, 'w') as fileobj:
-                fileobj.write('EXISTING FILE')
-            download = klass.from_file(filename, overwrite=True,
-                                       auto_transfer=False)
-            self.assertFalse(download.auto_transfer)
-            del download  # closes stream
-            with open(filename, 'rb') as fileobj:
-                self.assertEqual(fileobj.read(), b'')
-
-    def test_from_stream_defaults(self):
-        stream = _Stream()
-        klass = self._get_target_class()
-        download = klass.from_stream(stream)
-        self.assertIs(download.stream, stream)
-        self.assertTrue(download.auto_transfer)
-        self.assertIsNone(download.total_size)
-
-    def test_from_stream_explicit(self):
-        CHUNK_SIZE = 1 << 18
-        SIZE = 123
-        stream = _Stream()
-        klass = self._get_target_class()
-        download = klass.from_stream(stream, auto_transfer=False,
-                                     total_size=SIZE, chunksize=CHUNK_SIZE)
-        self.assertIs(download.stream, stream)
-        self.assertFalse(download.auto_transfer)
-        self.assertEqual(download.total_size, SIZE)
-        self.assertEqual(download.chunksize, CHUNK_SIZE)
-
-    def test_configure_request(self):
-        CHUNK_SIZE = 100
-        download = self._make_one(_Stream(), chunksize=CHUNK_SIZE)
-        request = _Dummy(headers={})
-        url_builder = _Dummy(query_params={})
-        download.configure_request(request, url_builder)
-        self.assertEqual(request.headers, {'Range': 'bytes=0-99'})
-        self.assertEqual(url_builder.query_params, {'alt': 'media'})
-
-    def test__set_total_wo_content_range_wo_existing_total(self):
-        info = {}
-        download = self._make_one(_Stream())
-        download._set_total(info)
-        self.assertEqual(download.total_size, 0)
-
-    def test__set_total_wo_content_range_w_existing_total(self):
-        SIZE = 123
-        info = {}
-        download = self._make_one(_Stream(), total_size=SIZE)
-        download._set_total(info)
-        self.assertEqual(download.total_size, SIZE)
-
-    def test__set_total_w_content_range_w_existing_total(self):
-        SIZE = 123
-        info = {'content-range': 'bytes 123-234/4567'}
-        download = self._make_one(_Stream(), total_size=SIZE)
-        download._set_total(info)
-        self.assertEqual(download.total_size, 4567)
-
-    def test__set_total_w_content_range_w_asterisk_total(self):
-        info = {'content-range': 'bytes 123-234/*'}
-        download = self._make_one(_Stream())
-        download._set_total(info)
-        self.assertEqual(download.total_size, 0)
-
-    def test_initialize_download_already_initialized(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        request = _Request()
-        download = self._make_one(_Stream())
-        download._initialize(None, self.URL)
-        with self.assertRaises(TransferInvalidError):
-            download.initialize_download(request, http=object())
-
-    def test_initialize_download_wo_autotransfer(self):
-        request = _Request()
-        http = object()
-        download = self._make_one(_Stream(), auto_transfer=False)
-        download.initialize_download(request, http)
-        self.assertIs(download.http, http)
-        self.assertEqual(download.url, request.url)
-
-    def test_initialize_download_w_autotransfer_failing(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-        from google.cloud.streaming.exceptions import HttpError
-
-        request = _Request()
-        http = object()
-        download = self._make_one(_Stream(), auto_transfer=True)
-
-        response = _makeResponse(http_client.BAD_REQUEST)
-        requester = _MakeRequest(response)
-
-        with _Monkey(MUT, make_api_request=requester):
-            with self.assertRaises(HttpError):
-                download.initialize_download(request, http)
-
-        self.assertTrue(len(requester._requested), 1)
-        self.assertIs(requester._requested[0][0], request)
-
-    def test_initialize_download_w_autotransfer_w_content_location(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        REDIRECT_URL = 'http://example.com/other'
-        request = _Request()
-        http = object()
-        info = {'content-location': REDIRECT_URL}
-        download = self._make_one(_Stream(), auto_transfer=True)
-
-        response = _makeResponse(http_client.NO_CONTENT, info)
-        requester = _MakeRequest(response)
-
-        with _Monkey(MUT, make_api_request=requester):
-            download.initialize_download(request, http)
-
-        self.assertIsNone(download._initial_response)
-        self.assertEqual(download.total_size, 0)
-        self.assertIs(download.http, http)
-        self.assertEqual(download.url, REDIRECT_URL)
-        self.assertTrue(len(requester._requested), 1)
-        self.assertIs(requester._requested[0][0], request)
-
-    def test__normalize_start_end_w_end_w_start_lt_0(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        download = self._make_one(_Stream())
-
-        with self.assertRaises(TransferInvalidError):
-            download._normalize_start_end(-1, 0)
-
-    def test__normalize_start_end_w_end_w_start_gt_total(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        download = self._make_one(_Stream())
-        download._set_total({'content-range': 'bytes 0-1/2'})
-
-        with self.assertRaises(TransferInvalidError):
-            download._normalize_start_end(3, 0)
-
-    def test__normalize_start_end_w_end_lt_start(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        download = self._make_one(_Stream())
-        download._set_total({'content-range': 'bytes 0-1/2'})
-
-        with self.assertRaises(TransferInvalidError):
-            download._normalize_start_end(1, 0)
-
-    def test__normalize_start_end_w_end_gt_start(self):
-        download = self._make_one(_Stream())
-        download._set_total({'content-range': 'bytes 0-1/2'})
-        self.assertEqual(download._normalize_start_end(1, 2), (1, 1))
-
-    def test__normalize_start_end_wo_end_w_start_lt_0(self):
-        download = self._make_one(_Stream())
-        download._set_total({'content-range': 'bytes 0-1/2'})
-        self.assertEqual(download._normalize_start_end(-2), (0, 1))
-        self.assertEqual(download._normalize_start_end(-1), (1, 1))
-
-    def test__normalize_start_end_wo_end_w_start_ge_0(self):
-        download = self._make_one(_Stream())
-        download._set_total({'content-range': 'bytes 0-1/100'})
-        self.assertEqual(download._normalize_start_end(0), (0, 99))
-        self.assertEqual(download._normalize_start_end(1), (1, 99))
-
-    def test__set_range_header_w_start_lt_0(self):
-        request = _Request()
-        download = self._make_one(_Stream())
-        download._set_range_header(request, -1)
-        self.assertEqual(request.headers['range'], 'bytes=-1')
-
-    def test__set_range_header_w_start_ge_0_wo_end(self):
-        request = _Request()
-        download = self._make_one(_Stream())
-        download._set_range_header(request, 0)
-        self.assertEqual(request.headers['range'], 'bytes=0-')
-
-    def test__set_range_header_w_start_ge_0_w_end(self):
-        request = _Request()
-        download = self._make_one(_Stream())
-        download._set_range_header(request, 0, 1)
-        self.assertEqual(request.headers['range'], 'bytes=0-1')
-
-    def test__compute_end_byte_w_start_lt_0_w_end(self):
-        download = self._make_one(_Stream())
-        self.assertEqual(download._compute_end_byte(-1, 1), 1)
-
-    def test__compute_end_byte_w_start_ge_0_wo_end_w_use_chunks(self):
-        CHUNK_SIZE = 5
-        download = self._make_one(_Stream(), chunksize=CHUNK_SIZE)
-        self.assertEqual(download._compute_end_byte(0, use_chunks=True), 4)
-
-    def test__compute_end_byte_w_start_ge_0_w_end_w_use_chunks(self):
-        CHUNK_SIZE = 5
-        download = self._make_one(_Stream(), chunksize=CHUNK_SIZE)
-        self.assertEqual(download._compute_end_byte(0, 3, use_chunks=True), 3)
-        self.assertEqual(download._compute_end_byte(0, 5, use_chunks=True), 4)
-
-    def test__compute_end_byte_w_start_ge_0_w_end_w_total_size(self):
-        CHUNK_SIZE = 50
-        download = self._make_one(_Stream(), chunksize=CHUNK_SIZE)
-        download._set_total({'content-range': 'bytes 0-1/10'})
-        self.assertEqual(download._compute_end_byte(0, 100, use_chunks=False),
-                         9)
-        self.assertEqual(download._compute_end_byte(0, 8, use_chunks=False), 8)
-
-    def test__compute_end_byte_w_start_ge_0_wo_end_w_total_size(self):
-        CHUNK_SIZE = 50
-        download = self._make_one(_Stream(), chunksize=CHUNK_SIZE)
-        download._set_total({'content-range': 'bytes 0-1/10'})
-        self.assertEqual(download._compute_end_byte(0, use_chunks=False), 9)
-
-    def test__get_chunk_not_initialized(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        download = self._make_one(_Stream())
-
-        with self.assertRaises(TransferInvalidError):
-            download._get_chunk(0, 10)
-
-    def test__get_chunk(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        http = object()
-        download = self._make_one(_Stream())
-        download._initialize(http, self.URL)
-        response = _makeResponse(http_client.OK)
-        requester = _MakeRequest(response)
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            found = download._get_chunk(0, 10)
-
-        self.assertIs(found, response)
-        self.assertTrue(len(requester._requested), 1)
-        request = requester._requested[0][0]
-        self.assertEqual(request.headers['range'], 'bytes=0-10')
-
-    def test__process_response_w_FORBIDDEN(self):
-        from google.cloud.streaming.exceptions import HttpError
-        from six.moves import http_client
-
-        download = self._make_one(_Stream())
-        response = _makeResponse(http_client.FORBIDDEN)
-        with self.assertRaises(HttpError):
-            download._process_response(response)
-
-    def test__process_response_w_NOT_FOUND(self):
-        from google.cloud.streaming.exceptions import HttpError
-        from six.moves import http_client
-
-        download = self._make_one(_Stream())
-        response = _makeResponse(http_client.NOT_FOUND)
-        with self.assertRaises(HttpError):
-            download._process_response(response)
-
-    def test__process_response_w_other_error(self):
-        from google.cloud.streaming.exceptions import TransferRetryError
-        from six.moves import http_client
-
-        download = self._make_one(_Stream())
-        response = _makeResponse(http_client.BAD_REQUEST)
-        with self.assertRaises(TransferRetryError):
-            download._process_response(response)
-
-    def test__process_response_w_OK_wo_encoding(self):
-        from six.moves import http_client
-
-        stream = _Stream()
-        download = self._make_one(stream)
-        response = _makeResponse(http_client.OK, content='OK')
-        found = download._process_response(response)
-        self.assertIs(found, response)
-        self.assertEqual(stream._written, ['OK'])
-        self.assertEqual(download.progress, 2)
-        self.assertIsNone(download.encoding)
-
-    def test__process_response_w_PARTIAL_CONTENT_w_encoding(self):
-        from six.moves import http_client
-
-        stream = _Stream()
-        download = self._make_one(stream)
-        info = {'content-encoding': 'blah'}
-        response = _makeResponse(http_client.OK, info, 'PARTIAL')
-        found = download._process_response(response)
-        self.assertIs(found, response)
-        self.assertEqual(stream._written, ['PARTIAL'])
-        self.assertEqual(download.progress, 7)
-        self.assertEqual(download.encoding, 'blah')
-
-    def test__process_response_w_REQUESTED_RANGE_NOT_SATISFIABLE(self):
-        from six.moves import http_client
-
-        stream = _Stream()
-        download = self._make_one(stream)
-        response = _makeResponse(
-            http_client.REQUESTED_RANGE_NOT_SATISFIABLE)
-        found = download._process_response(response)
-        self.assertIs(found, response)
-        self.assertEqual(stream._written, [])
-        self.assertEqual(download.progress, 0)
-        self.assertIsNone(download.encoding)
-
-    def test__process_response_w_NO_CONTENT(self):
-        from six.moves import http_client
-
-        stream = _Stream()
-        download = self._make_one(stream)
-        response = _makeResponse(status_code=http_client.NO_CONTENT)
-        found = download._process_response(response)
-        self.assertIs(found, response)
-        self.assertEqual(stream._written, [''])
-        self.assertEqual(download.progress, 0)
-        self.assertIsNone(download.encoding)
-
-    def test_get_range_not_initialized(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        download = self._make_one(_Stream())
-        with self.assertRaises(TransferInvalidError):
-            download.get_range(0, 10)
-
-    def test_get_range_wo_total_size_complete(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        CONTENT = b'ABCDEFGHIJ'
-        LEN = len(CONTENT)
-        REQ_RANGE = 'bytes=0-%d' % (LEN,)
-        RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN)
-        http = object()
-        stream = _Stream()
-        download = self._make_one(stream)
-        download._initialize(http, self.URL)
-        info = {'content-range': RESP_RANGE}
-        response = _makeResponse(http_client.OK, info, CONTENT)
-        requester = _MakeRequest(response)
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            download.get_range(0, LEN)
-
-        self.assertTrue(len(requester._requested), 1)
-        request = requester._requested[0][0]
-        self.assertEqual(request.headers, {'range': REQ_RANGE})
-        self.assertEqual(stream._written, [CONTENT])
-        self.assertEqual(download.total_size, LEN)
-
-    def test_get_range_wo_total_size_wo_end(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        CONTENT = b'ABCDEFGHIJ'
-        LEN = len(CONTENT)
-        START = 5
-        CHUNK_SIZE = 123
-        REQ_RANGE = 'bytes=%d-%d' % (START, START + CHUNK_SIZE - 1,)
-        RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN)
-        http = object()
-        stream = _Stream()
-        download = self._make_one(stream, chunksize=CHUNK_SIZE)
-        download._initialize(http, self.URL)
-        info = {'content-range': RESP_RANGE}
-        response = _makeResponse(http_client.OK, info, CONTENT[START:])
-        requester = _MakeRequest(response)
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            download.get_range(START)
-
-        self.assertTrue(len(requester._requested), 1)
-        request = requester._requested[0][0]
-        self.assertEqual(request.headers, {'range': REQ_RANGE})
-        self.assertEqual(stream._written, [CONTENT[START:]])
-        self.assertEqual(download.total_size, LEN)
-
-    def test_get_range_w_total_size_partial(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        CONTENT = b'ABCDEFGHIJ'
-        LEN = len(CONTENT)
-        PARTIAL_LEN = 5
-        REQ_RANGE = 'bytes=0-%d' % (PARTIAL_LEN,)
-        RESP_RANGE = 'bytes 0-%d/%d' % (PARTIAL_LEN, LEN,)
-        http = object()
-        stream = _Stream()
-        download = self._make_one(stream, total_size=LEN)
-        download._initialize(http, self.URL)
-        info = {'content-range': RESP_RANGE}
-        response = _makeResponse(http_client.OK, info, CONTENT[:PARTIAL_LEN])
-        response.length = LEN
-        requester = _MakeRequest(response)
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            download.get_range(0, PARTIAL_LEN)
-
-        self.assertTrue(len(requester._requested), 1)
-        request = requester._requested[0][0]
-        self.assertEqual(request.headers, {'range': REQ_RANGE})
-        self.assertEqual(stream._written, [CONTENT[:PARTIAL_LEN]])
-        self.assertEqual(download.total_size, LEN)
-
-    def test_get_range_w_empty_chunk(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-        from google.cloud.streaming.exceptions import TransferRetryError
-
-        CONTENT = b'ABCDEFGHIJ'
-        LEN = len(CONTENT)
-        START = 5
-        CHUNK_SIZE = 123
-        REQ_RANGE = 'bytes=%d-%d' % (START, START + CHUNK_SIZE - 1,)
-        RESP_RANGE = 'bytes %d-%d/%d' % (START, LEN - 1, LEN)
-        http = object()
-        stream = _Stream()
-        download = self._make_one(stream, chunksize=CHUNK_SIZE)
-        download._initialize(http, self.URL)
-        info = {'content-range': RESP_RANGE}
-        response = _makeResponse(http_client.OK, info)
-        requester = _MakeRequest(response)
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            with self.assertRaises(TransferRetryError):
-                download.get_range(START)
-
-        self.assertTrue(len(requester._requested), 1)
-        request = requester._requested[0][0]
-        self.assertEqual(request.headers, {'range': REQ_RANGE})
-        self.assertEqual(stream._written, [''])
-        self.assertEqual(download.total_size, LEN)
-
-    def test_get_range_w_total_size_wo_use_chunks(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        CONTENT = b'ABCDEFGHIJ'
-        LEN = len(CONTENT)
-        CHUNK_SIZE = 3
-        REQ_RANGE = 'bytes=0-%d' % (LEN - 1,)
-        RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,)
-        http = object()
-        stream = _Stream()
-        download = self._make_one(stream, total_size=LEN, chunksize=CHUNK_SIZE)
-        download._initialize(http, self.URL)
-        info = {'content-range': RESP_RANGE}
-        response = _makeResponse(http_client.OK, info, CONTENT)
-        requester = _MakeRequest(response)
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            download.get_range(0, use_chunks=False)
-
-        self.assertTrue(len(requester._requested), 1)
-        request = requester._requested[0][0]
-        self.assertEqual(request.headers, {'range': REQ_RANGE})
-        self.assertEqual(stream._written, [CONTENT])
-        self.assertEqual(download.total_size, LEN)
-
-    def test_get_range_w_multiple_chunks(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        CONTENT = b'ABCDE'
-        LEN = len(CONTENT)
-        CHUNK_SIZE = 3
-        REQ_RANGE_1 = 'bytes=0-%d' % (CHUNK_SIZE - 1,)
-        RESP_RANGE_1 = 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, LEN)
-        REQ_RANGE_2 = 'bytes=%d-%d' % (CHUNK_SIZE, LEN - 1)
-        RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN)
-        http = object()
-        stream = _Stream()
-        download = self._make_one(stream, chunksize=CHUNK_SIZE)
-        download._initialize(http, self.URL)
-        info_1 = {'content-range': RESP_RANGE_1}
-        response_1 = _makeResponse(http_client.PARTIAL_CONTENT, info_1,
-                                   CONTENT[:CHUNK_SIZE])
-        info_2 = {'content-range': RESP_RANGE_2}
-        response_2 = _makeResponse(http_client.OK, info_2,
-                                   CONTENT[CHUNK_SIZE:])
-        requester = _MakeRequest(response_1, response_2)
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            download.get_range(0)
-
-        self.assertTrue(len(requester._requested), 2)
-        request_1 = requester._requested[0][0]
-        self.assertEqual(request_1.headers, {'range': REQ_RANGE_1})
-        request_2 = requester._requested[1][0]
-        self.assertEqual(request_2.headers, {'range': REQ_RANGE_2})
-        self.assertEqual(stream._written, [b'ABC', b'DE'])
-        self.assertEqual(download.total_size, LEN)
-
-    def test_stream_file_not_initialized(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        download = self._make_one(_Stream())
-
-        with self.assertRaises(TransferInvalidError):
-            download.stream_file()
-
-    def test_stream_file_w_initial_response_complete(self):
-        from six.moves import http_client
-
-        CONTENT = b'ABCDEFGHIJ'
-        LEN = len(CONTENT)
-        RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,)
-        stream = _Stream()
-        download = self._make_one(stream, total_size=LEN)
-        info = {'content-range': RESP_RANGE}
-        download._initial_response = _makeResponse(
-            http_client.OK, info, CONTENT)
-        http = object()
-        download._initialize(http, _Request.URL)
-
-        download.stream_file()
-
-        self.assertEqual(stream._written, [CONTENT])
-        self.assertEqual(download.total_size, LEN)
-
-    def test_stream_file_w_initial_response_incomplete(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        CHUNK_SIZE = 3
-        CONTENT = b'ABCDEF'
-        LEN = len(CONTENT)
-        RESP_RANGE_1 = 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, LEN,)
-        REQ_RANGE_2 = 'bytes=%d-%d' % (CHUNK_SIZE, LEN - 1)
-        RESP_RANGE_2 = 'bytes %d-%d/%d' % (CHUNK_SIZE, LEN - 1, LEN,)
-        stream = _Stream()
-        http = object()
-        download = self._make_one(stream, chunksize=CHUNK_SIZE)
-        info_1 = {'content-range': RESP_RANGE_1}
-        download._initial_response = _makeResponse(
-            http_client.PARTIAL_CONTENT, info_1, CONTENT[:CHUNK_SIZE])
-        info_2 = {'content-range': RESP_RANGE_2}
-        response_2 = _makeResponse(
-            http_client.OK, info_2, CONTENT[CHUNK_SIZE:])
-        requester = _MakeRequest(response_2)
-
-        download._initialize(http, _Request.URL)
-
-        request = _Request()
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            download.stream_file()
-
-        self.assertTrue(len(requester._requested), 1)
-        request = requester._requested[0][0]
-        self.assertEqual(request.headers, {'range': REQ_RANGE_2})
-        self.assertEqual(stream._written,
-                         [CONTENT[:CHUNK_SIZE], CONTENT[CHUNK_SIZE:]])
-        self.assertEqual(download.total_size, LEN)
-
-    def test_stream_file_wo_initial_response_wo_total_size(self):
-        from six.moves import http_client
-        from google.cloud._testing import _Monkey
-        from google.cloud.streaming import transfer as MUT
-
-        CONTENT = b'ABCDEFGHIJ'
-        LEN = len(CONTENT)
-        CHUNK_SIZE = 123
-        REQ_RANGE = 'bytes=0-%d' % (CHUNK_SIZE - 1)
-        RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,)
-        stream = _Stream()
-        http = object()
-        download = self._make_one(stream, chunksize=CHUNK_SIZE)
-        info = {'content-range': RESP_RANGE}
-        response = _makeResponse(http_client.OK, info, CONTENT)
-        requester = _MakeRequest(response)
-        download._initialize(http, _Request.URL)
-
-        request = _Request()
-
-        with _Monkey(MUT,
-                     Request=_Request,
-                     make_api_request=requester):
-            download.stream_file()
-
-        self.assertTrue(len(requester._requested), 1)
-        request = requester._requested[0][0]
-        self.assertEqual(request.headers, {'range': REQ_RANGE})
-        self.assertEqual(stream._written, [CONTENT])
-        self.assertEqual(download.total_size, LEN)
-
-
-class Test_Upload(unittest.TestCase):
-    URL = "http://example.com/api"
-    MIME_TYPE = 'application/octet-stream'
-    UPLOAD_URL = 'http://example.com/upload/id=foobar'
-
-    @staticmethod
-    def _get_target_class():
-        from google.cloud.streaming.transfer import Upload
-
-        return Upload
-
-    def _make_one(self, stream, mime_type=MIME_TYPE, *args, **kw):
-        return self._get_target_class()(stream, mime_type, *args, **kw)
-
-    def test_ctor_defaults(self):
-        from google.cloud.streaming.transfer import _DEFAULT_CHUNKSIZE
-
-        stream = _Stream()
-        upload = self._make_one(stream)
-        self.assertIs(upload.stream, stream)
-        self.assertIsNone(upload._final_response)
-        self.assertIsNone(upload._server_chunk_granularity)
-        self.assertFalse(upload.complete)
-        self.assertEqual(upload.mime_type, self.MIME_TYPE)
-        self.assertEqual(upload.progress, 0)
-        self.assertIsNone(upload.strategy)
-        self.assertIsNone(upload.total_size)
-        self.assertEqual(upload.chunksize, _DEFAULT_CHUNKSIZE)
-
-    def test_ctor_w_kwds(self):
-        stream = _Stream()
-        CHUNK_SIZE = 123
-        upload = self._make_one(stream, chunksize=CHUNK_SIZE)
-        self.assertIs(upload.stream, stream)
-        self.assertEqual(upload.mime_type, self.MIME_TYPE)
-        self.assertEqual(upload.chunksize, CHUNK_SIZE)
-
-    def test_from_file_w_nonesuch_file(self):
-        klass = self._get_target_class()
-        filename = '~nosuchuser/file.txt'
-        with self.assertRaises(OSError):
-            klass.from_file(filename)
-
-    def test_from_file_wo_mimetype_w_unguessable_filename(self):
-        import os
-        from google.cloud._testing import _tempdir
-
-        klass = self._get_target_class()
-        CONTENT = b'EXISTING FILE W/ UNGUESSABLE MIMETYPE'
-        with _tempdir() as tempdir:
-            filename = os.path.join(tempdir, 'file.unguessable')
-            with open(filename, 'wb') as fileobj:
-                fileobj.write(CONTENT)
-            with self.assertRaises(ValueError):
-                klass.from_file(filename)
-
-    def test_from_file_wo_mimetype_w_guessable_filename(self):
-        import os
-        from google.cloud._testing import _tempdir
-
-        klass = self._get_target_class()
-        CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE'
-        with _tempdir() as tempdir:
-            filename = os.path.join(tempdir, 'file.txt')
-            with open(filename, 'wb') as fileobj:
-                fileobj.write(CONTENT)
-            upload = klass.from_file(filename)
-            self.assertEqual(upload.mime_type, 'text/plain')
-            self.assertTrue(upload.auto_transfer)
-            self.assertEqual(upload.total_size, len(CONTENT))
-            upload._stream.close()
-
-    def test_from_file_w_mimetype_w_auto_transfer_w_kwds(self):
-        import os
-        from google.cloud._testing import _tempdir
-
-        klass = self._get_target_class()
-        CONTENT = b'EXISTING FILE W/ GUESSABLE MIMETYPE'
-        CHUNK_SIZE = 3
-        with _tempdir() as tempdir:
-            filename = os.path.join(tempdir, 'file.unguessable')
-            with open(filename, 'wb') as fileobj:
-                fileobj.write(CONTENT)
-            upload = klass.from_file(
-                filename,
-                mime_type=self.MIME_TYPE,
-                auto_transfer=False,
-                chunksize=CHUNK_SIZE)
-            self.assertEqual(upload.mime_type, self.MIME_TYPE)
-            self.assertFalse(upload.auto_transfer)
-            self.assertEqual(upload.total_size, len(CONTENT))
-            self.assertEqual(upload.chunksize, CHUNK_SIZE)
-            upload._stream.close()
-
-    def test_from_stream_wo_mimetype(self):
-        klass = self._get_target_class()
-        stream = _Stream()
-        with self.assertRaises(ValueError):
-            klass.from_stream(stream, mime_type=None)
-
-    def test_from_stream_defaults(self):
-        klass = self._get_target_class()
-        stream = _Stream()
-        upload = klass.from_stream(stream, mime_type=self.MIME_TYPE)
-        self.assertEqual(upload.mime_type, self.MIME_TYPE)
-        self.assertTrue(upload.auto_transfer)
-        self.assertIsNone(upload.total_size)
-
-    def test_from_stream_explicit(self):
-        klass = self._get_target_class()
-        stream = _Stream()
-        SIZE = 10
-        CHUNK_SIZE = 3
-        upload = klass.from_stream(
-            stream,
-            mime_type=self.MIME_TYPE,
-            auto_transfer=False,
-            total_size=SIZE,
-            chunksize=CHUNK_SIZE)
-        self.assertEqual(upload.mime_type, self.MIME_TYPE)
-        self.assertFalse(upload.auto_transfer)
-        self.assertEqual(upload.total_size, SIZE)
-        self.assertEqual(upload.chunksize, CHUNK_SIZE)
-
-    def test_strategy_setter_invalid(self):
-        upload = self._make_one(_Stream())
-        with self.assertRaises(ValueError):
-            upload.strategy = object()
-        with self.assertRaises(ValueError):
-            upload.strategy = 'unknown'
-
-    def test_strategy_setter_SIMPLE_UPLOAD(self):
-        from google.cloud.streaming.transfer import SIMPLE_UPLOAD
-
-        upload = self._make_one(_Stream())
-        upload.strategy = SIMPLE_UPLOAD
-        self.assertEqual(upload.strategy, SIMPLE_UPLOAD)
-
-    def test_strategy_setter_RESUMABLE_UPLOAD(self):
-        from google.cloud.streaming.transfer import RESUMABLE_UPLOAD
-
-        upload = self._make_one(_Stream())
-        upload.strategy = RESUMABLE_UPLOAD
-        self.assertEqual(upload.strategy, RESUMABLE_UPLOAD)
-
-    def test_total_size_setter_initialized(self):
-        from google.cloud.streaming.exceptions import TransferInvalidError
-
-        SIZE = 123
-        upload = self._make_one(_Stream)
-        http = object()
-        upload._initialize(http, _Request.URL)
-        with self.assertRaises(TransferInvalidError):
-            upload.total_size = SIZE
-
-    def test_total_size_setter_not_initialized(self):
-        SIZE = 123
-        upload = self._make_one(_Stream)
-        upload.total_size = SIZE
-        self.assertEqual(upload.total_size, SIZE)
-
-    def test__set_default_strategy_w_existing_strategy(self):
-        from google.cloud.streaming.transfer import RESUMABLE_UPLOAD
-
-        config = _Dummy(
-            resumable_path='/resumable/endpoint',
-            simple_multipart=True,
-            simple_path='/upload/endpoint',
-        )
-        request = _Request()
-        upload = self._make_one(_Stream)
-        upload.strategy = RESUMABLE_UPLOAD
-        upload._set_default_strategy(config, request)
-        self.assertEqual(upload.strategy, RESUMABLE_UPLOAD)
-
-    def test__set_default_strategy_wo_resumable_path(self):
-        from google.cloud.streaming.transfer import SIMPLE_UPLOAD
-
-        config = _Dummy(
-            resumable_path=None,
-            simple_multipart=True,
-            simple_path='/upload/endpoint',
-        )
-        request = _Request()
-        upload = self._make_one(_Stream())
-        upload._set_default_strategy(config, request)
-        self.assertEqual(upload.strategy, SIMPLE_UPLOAD)
-
-    def test__set_default_strategy_w_total_size_gt_threshhold(self):
-        from google.cloud.streaming.transfer import RESUMABLE_UPLOAD_THRESHOLD
-        from google.cloud.streaming.transfer import RESUMABLE_UPLOAD
-
-        config = _UploadConfig()
-        request = _Request()
-        upload = self._make_one(
-            _Stream(), total_size=RESUMABLE_UPLOAD_THRESHOLD + 1)
-        upload._set_default_strategy(config, request)
-        self.assertEqual(upload.strategy, RESUMABLE_UPLOAD)
-
-    def test__set_default_strategy_w_body_wo_multipart(self):
-        from google.cloud.streaming.transfer import RESUMABLE_UPLOAD
-
-        CONTENT = b'ABCDEFGHIJ'
-        config = _UploadConfig()
-        config.simple_multipart = False
-        request = _Request(body=CONTENT)
-        upload = self._make_one(_Stream(), total_size=len(CONTENT))
-        upload._set_default_strategy(config, request)
-        self.assertEqual(upload.strategy, RESUMABLE_UPLOAD)
-
-    def test__set_default_strategy_w_body_w_multipart_wo_simple_path(self):
-        from google.cloud.streaming.transfer import RESUMABLE_UPLOAD
-
-        CONTENT = b'ABCDEFGHIJ'
-        config = _UploadConfig()
-        config.simple_path = None
-        request = _Request(body=CONTENT)
-        upload = self._make_one(_Stream(), total_size=len(CONTENT))
-        upload._set_default_strategy(config, request)
-        self.assertEqual(upload.strategy, RESUMABLE_UPLOAD)
-
-    def test__set_default_strategy_w_body_w_multipart_w_simple_path(self):
-        from google.cloud.streaming.transfer import SIMPLE_UPLOAD
-
-        CONTENT = b'ABCDEFGHIJ'
-        config = _UploadConfig()
-        request = _Request(body=CONTENT)
-        upload = self._make_one(_Stream(), total_size=len(CONTENT))
-        upload._set_default_strategy(config, request)
-        self.assertEqual(upload.strategy, SIMPLE_UPLOAD)
-
-    def test_configure_request_w_total_size_gt_max_size(self):
-        MAX_SIZE = 1000
-        config = _UploadConfig()
-        config.max_size = MAX_SIZE
-        request = _Request()
-        url_builder = _Dummy()
-        upload = self._make_one(_Stream(), total_size=MAX_SIZE + 1)
-        with self.assertRaises(ValueError):
-            upload.configure_request(config, request, url_builder)
-
-    def test_configure_request_w_invalid_mimetype(self):
-        config = _UploadConfig()
-        config.accept = ('text/*',)
-        request = _Request()
_Dummy() - upload = self._make_one(_Stream()) - with self.assertRaises(ValueError): - upload.configure_request(config, request, url_builder) - - def test_configure_request_w_simple_wo_body(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - CONTENT = b'CONTENT' - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = SIMPLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'media'}) - self.assertEqual(url_builder.relative_path, config.simple_path) - - self.assertEqual(request.headers, {'content-type': self.MIME_TYPE}) - self.assertEqual(request.body, CONTENT) - self.assertEqual(request.loggable_body, '') - - def test_configure_request_w_simple_w_body(self): - from google.cloud._helpers import _to_bytes - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - CONTENT = b'CONTENT' - BODY = b'BODY' - config = _UploadConfig() - request = _Request(body=BODY) - request.headers['content-type'] = 'text/plain' - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = SIMPLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'multipart'}) - self.assertEqual(url_builder.relative_path, config.simple_path) - - self.assertEqual(list(request.headers), ['content-type']) - ctype, boundary = [x.strip() - for x in request.headers['content-type'].split(';')] - self.assertEqual(ctype, 'multipart/related') - self.assertTrue(boundary.startswith('boundary="==')) - self.assertTrue(boundary.endswith('=="')) - - divider = b'--' + _to_bytes(boundary[len('boundary="'):-1]) - chunks = request.body.split(divider)[1:-1] # discard prolog / epilog - self.assertEqual(len(chunks), 2) - - parse_chunk = _email_chunk_parser() - text_msg = parse_chunk(chunks[0].strip()) - self.assertEqual(dict(text_msg._headers), - {'Content-Type': 'text/plain', - 'MIME-Version': '1.0'}) - self.assertEqual(text_msg._payload, BODY.decode('ascii')) - - app_msg = parse_chunk(chunks[1].strip()) - self.assertEqual(dict(app_msg._headers), - {'Content-Type': self.MIME_TYPE, - 'Content-Transfer-Encoding': 'binary', - 'MIME-Version': '1.0'}) - self.assertEqual(app_msg._payload, CONTENT.decode('ascii')) - self.assertTrue(b'' in request.loggable_body) - - def test_configure_request_w_resumable_wo_total_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'CONTENT' - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.strategy = RESUMABLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 'resumable'}) - self.assertEqual(url_builder.relative_path, config.resumable_path) - - self.assertEqual(request.headers, - {'X-Upload-Content-Type': self.MIME_TYPE}) - - def test_configure_request_w_resumable_w_total_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'CONTENT' - LEN = len(CONTENT) - config = _UploadConfig() - request = _Request() - url_builder = _Dummy(query_params={}) - upload = self._make_one(_Stream(CONTENT)) - upload.total_size = LEN - upload.strategy = RESUMABLE_UPLOAD - - upload.configure_request(config, request, url_builder) - - self.assertEqual(url_builder.query_params, {'uploadType': 
'resumable'}) - self.assertEqual(url_builder.relative_path, config.resumable_path) - - self.assertEqual(request.headers, - {'X-Upload-Content-Type': self.MIME_TYPE, - 'X-Upload-Content-Length': '%d' % (LEN,)}) - - def test_refresh_upload_state_w_simple_strategy(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - upload.refresh_upload_state() # no-op - - def test_refresh_upload_state_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - with self.assertRaises(TransferInvalidError): - upload.refresh_upload_state() - - def test_refresh_upload_state_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.OK, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertTrue(upload.complete) - self.assertEqual(upload.progress, LEN) - self.assertEqual(stream.tell(), LEN) - self.assertIs(upload._final_response, response) - - def test_refresh_upload_state_w_CREATED(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - RESP_RANGE = 'bytes 0-%d/%d' % (LEN - 1, LEN,) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'content-range': RESP_RANGE} - response = _makeResponse(http_client.CREATED, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertTrue(upload.complete) - self.assertEqual(upload.progress, LEN) - self.assertEqual(stream.tell(), LEN) - self.assertIs(upload._final_response, response) - - def test_refresh_upload_state_w_RESUME_INCOMPLETE_w_range(self): - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud._testing import _Monkey - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - LAST = 5 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - info = {'range': '0-%d' % (LAST - 1,)} - response = _makeResponse(RESUME_INCOMPLETE, info, CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertFalse(upload.complete) - self.assertEqual(upload.progress, LAST) - self.assertEqual(stream.tell(), LAST) - self.assertIsNot(upload._final_response, 
response) - - def test_refresh_upload_state_w_RESUME_INCOMPLETE_wo_range(self): - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud._testing import _Monkey - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - response = _makeResponse(RESUME_INCOMPLETE, content=CONTENT) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.refresh_upload_state() - - self.assertFalse(upload.complete) - self.assertEqual(upload.progress, 0) - self.assertEqual(stream.tell(), 0) - self.assertIsNot(upload._final_response, response) - - def test_refresh_upload_state_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - LEN = len(CONTENT) - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=LEN) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, _Request.URL) - response = _makeResponse(http_client.FORBIDDEN) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - with self.assertRaises(HttpError): - upload.refresh_upload_state() - - def test__get_range_header_miss(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None) - self.assertIsNone(upload._get_range_header(response)) - - def test__get_range_header_w_Range(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None, {'Range': '123'}) - self.assertEqual(upload._get_range_header(response), '123') - - def test__get_range_header_w_range(self): - upload = self._make_one(_Stream()) - response = _makeResponse(None, {'range': '123'}) - self.assertEqual(upload._get_range_header(response), '123') - - def test_initialize_upload_no_strategy(self): - request = _Request() - upload = self._make_one(_Stream()) - with self.assertRaises(ValueError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_simple_w_http(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - upload.initialize_upload(request, http=object()) # no-op - - def test_initialize_upload_resumable_already_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(None, self.URL) - with self.assertRaises(TransferInvalidError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_w_http_resumable_not_initialized_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream()) - upload.strategy = RESUMABLE_UPLOAD - response = 
_makeResponse(http_client.FORBIDDEN) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - with self.assertRaises(HttpError): - upload.initialize_upload(request, http=object()) - - def test_initialize_upload_w_http_wo_auto_transfer_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - request = _Request() - upload = self._make_one(_Stream(), auto_transfer=False) - upload.strategy = RESUMABLE_UPLOAD - info = {'location': self.UPLOAD_URL} - response = _makeResponse(http_client.OK, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - upload.initialize_upload(request, http=object()) - - self.assertIsNone(upload._server_chunk_granularity) - self.assertEqual(upload.url, self.UPLOAD_URL) - self.assertEqual(requester._responses, []) - self.assertEqual(len(requester._requested), 1) - self.assertIs(requester._requested[0][0], request) - - def test_initialize_upload_w_granularity_w_auto_transfer_w_OK(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - request = _Request() - upload = self._make_one(_Stream(CONTENT), chunksize=1000) - upload.strategy = RESUMABLE_UPLOAD - info = {'X-Goog-Upload-Chunk-Granularity': '100', - 'location': self.UPLOAD_URL} - response = _makeResponse(http_client.OK, info) - chunk_response = _makeResponse(http_client.OK) - requester = _MakeRequest(response, chunk_response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - upload.initialize_upload(request, http) - - self.assertEqual(upload._server_chunk_granularity, 100) - self.assertEqual(upload.url, self.UPLOAD_URL) - self.assertEqual(requester._responses, []) - self.assertEqual(len(requester._requested), 2) - self.assertIs(requester._requested[0][0], request) - chunk_request = requester._requested[1][0] - self.assertIsInstance(chunk_request, _Request) - self.assertEqual(chunk_request.url, self.UPLOAD_URL) - self.assertEqual(chunk_request.http_method, 'PUT') - self.assertEqual(chunk_request.body, CONTENT) - - def test__last_byte(self): - upload = self._make_one(_Stream()) - self.assertEqual(upload._last_byte('123-456'), 456) - - def test__validate_chunksize_wo__server_chunk_granularity(self): - upload = self._make_one(_Stream()) - upload._validate_chunksize(123) # no-op - - def test__validate_chunksize_w__server_chunk_granularity_miss(self): - upload = self._make_one(_Stream()) - upload._server_chunk_granularity = 100 - with self.assertRaises(ValueError): - upload._validate_chunksize(123) - - def test__validate_chunksize_w__server_chunk_granularity_hit(self): - upload = self._make_one(_Stream()) - upload._server_chunk_granularity = 100 - upload._validate_chunksize(400) - - def test_stream_file_w_simple_strategy(self): - from google.cloud.streaming.transfer import SIMPLE_UPLOAD - - upload = self._make_one(_Stream()) - upload.strategy = SIMPLE_UPLOAD - with self.assertRaises(ValueError): - upload.stream_file() - - def test_stream_file_w_use_chunks_invalid_chunk_size(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream(), chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 100 - with 
self.assertRaises(ValueError): - upload.stream_file(use_chunks=True) - - def test_stream_file_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - upload = self._make_one(_Stream(), chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - with self.assertRaises(TransferInvalidError): - upload.stream_file() - - def test_stream_file_already_complete_w_unseekable_stream(self): - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - http = object() - stream = object() - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(), response) - - def test_stream_file_already_complete_w_seekable_stream_unsynced(self): - from google.cloud.streaming.exceptions import CommunicationError - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - with self.assertRaises(CommunicationError): - upload.stream_file() - - def test_stream_file_already_complete_wo_seekable_method_synced(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_already_complete_w_seekable_method_true_synced(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _StreamWithSeekableMethod(CONTENT, True) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_already_complete_w_seekable_method_false(self): - import os - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _StreamWithSeekableMethod(CONTENT, False) - stream.seek(0, os.SEEK_END) - response = object() - upload = self._make_one(stream, chunksize=1024) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 128 - upload._initialize(http, _Request.URL) - upload._final_response = response - upload._complete = True - self.assertIs(upload.stream_file(use_chunks=False), response) - - def test_stream_file_incomplete(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from 
google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, chunksize=6) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 6 - upload._initialize(http, self.UPLOAD_URL) - - info_1 = {'content-length': '0', 'range': 'bytes=0-5'} - response_1 = _makeResponse(RESUME_INCOMPLETE, info_1) - info_2 = {'content-length': '0', 'range': 'bytes=6-9'} - response_2 = _makeResponse(http_client.OK, info_2) - requester = _MakeRequest(response_1, response_2) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - response = upload.stream_file() - - self.assertIs(response, response_2) - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 2) - - request_1 = requester._requested[0][0] - self.assertEqual(request_1.url, self.UPLOAD_URL) - self.assertEqual(request_1.http_method, 'PUT') - self.assertEqual(request_1.headers, - {'Content-Range': 'bytes 0-5/*', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request_1.body, CONTENT[:6]) - - request_2 = requester._requested[1][0] - self.assertEqual(request_2.url, self.UPLOAD_URL) - self.assertEqual(request_2.http_method, 'PUT') - self.assertEqual(request_2.headers, - {'Content-Range': 'bytes 6-9/10', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request_2.body, CONTENT[6:]) - - def test_stream_file_incomplete_w_transfer_error(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import CommunicationError - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, chunksize=6) - upload.strategy = RESUMABLE_UPLOAD - upload._server_chunk_granularity = 6 - upload._initialize(http, self.UPLOAD_URL) - - info = { - 'content-length': '0', - 'range': 'bytes=0-4', # simulate error, s.b. 
'0-5' - } - response = _makeResponse(RESUME_INCOMPLETE, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, - Request=_Request, - make_api_request=requester): - with self.assertRaises(CommunicationError): - upload.stream_file() - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 1) - - request = requester._requested[0][0] - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.headers, - {'Content-Range': 'bytes 0-5/*', - 'Content-Type': self.MIME_TYPE}) - self.assertEqual(request.body, CONTENT[:6]) - - def test__send_media_request_wo_error(self): - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - - CONTENT = b'ABCDEFGHIJ' - bytes_http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream) - upload.bytes_http = bytes_http - - headers = {'Content-Range': 'bytes 0-9/10', - 'Content-Type': self.MIME_TYPE} - request = _Request(self.UPLOAD_URL, 'PUT', CONTENT, headers) - info = {'content-length': '0', 'range': 'bytes=0-4'} - response = _makeResponse(RESUME_INCOMPLETE, info) - requester = _MakeRequest(response) - - with _Monkey(MUT, make_api_request=requester): - upload._send_media_request(request, 9) - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 1) - used_request, used_http, _ = requester._requested[0] - self.assertIs(used_request, request) - self.assertIs(used_http, bytes_http) - self.assertEqual(stream.tell(), 4) - - def test__send_media_request_w_error(self): - from six.moves import http_client - from google.cloud._testing import _Monkey - from google.cloud.streaming import transfer as MUT - from google.cloud.streaming.exceptions import HttpError - from google.cloud.streaming.http_wrapper import RESUME_INCOMPLETE - from google.cloud.streaming.transfer import RESUMABLE_UPLOAD - - CONTENT = b'ABCDEFGHIJ' - bytes_http = object() - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream) - upload.strategy = RESUMABLE_UPLOAD - upload._initialize(http, self.UPLOAD_URL) - upload.bytes_http = bytes_http - - headers = {'Content-Range': 'bytes 0-9/10', - 'Content-Type': self.MIME_TYPE} - request = _Request(self.UPLOAD_URL, 'PUT', CONTENT, headers) - info_1 = {'content-length': '0', 'range': 'bytes=0-4'} - response_1 = _makeResponse(http_client.FORBIDDEN, info_1) - info_2 = {'Content-Length': '0', 'Range': 'bytes=0-4'} - response_2 = _makeResponse(RESUME_INCOMPLETE, info_2) - requester = _MakeRequest(response_1, response_2) - - with _Monkey(MUT, Request=_Request, make_api_request=requester): - with self.assertRaises(HttpError): - upload._send_media_request(request, 9) - - self.assertEqual(len(requester._responses), 0) - self.assertEqual(len(requester._requested), 2) - first_request, first_http, _ = requester._requested[0] - self.assertIs(first_request, request) - self.assertIs(first_http, bytes_http) - second_request, second_http, _ = requester._requested[1] - self.assertEqual(second_request.url, self.UPLOAD_URL) - self.assertEqual(second_request.http_method, 'PUT') # ACK! 
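These transfer tests pin down the exact Content-Range headers a resumable upload sends: 'bytes 0-5/*' for a chunk sent while the total size is still unknown, 'bytes 6-9/10' for the final chunk once the size is known, and 'bytes */*' or 'bytes */10' for a status probe that carries no data. A minimal sketch of that header construction; the helper name is hypothetical and not part of the library:

    def _content_range(start, end, total=None):
        # 'bytes */*' or 'bytes */10': status probe, no data sent.
        # 'bytes 0-5/*': chunk sent while the total size is unknown.
        # 'bytes 6-9/10': final chunk, total size now known.
        total_part = '*' if total is None else '%d' % (total,)
        if start is None:
            return 'bytes */%s' % (total_part,)
        return 'bytes %d-%d/%s' % (start, end, total_part)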
- self.assertEqual(second_request.headers, - {'Content-Range': 'bytes */*'}) - self.assertIs(second_http, http) - - def test__send_media_body_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - upload = self._make_one(_Stream()) - with self.assertRaises(TransferInvalidError): - upload._send_media_body(0) - - def test__send_media_body_wo_total_size(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - http = object() - upload = self._make_one(_Stream()) - upload._initialize(http, _Request.URL) - with self.assertRaises(TransferInvalidError): - upload._send_media_body(0) - - def test__send_media_body_start_lt_total_size(self): - from google.cloud.streaming.stream_slice import StreamSlice - - SIZE = 1234 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_media_body(0) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), SIZE) - self.assertEqual(request.headers, - {'content-length': '%d' % (SIZE,), # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) - self.assertEqual(end, SIZE) - - def test__send_media_body_start_eq_total_size(self): - from google.cloud.streaming.stream_slice import StreamSlice - - SIZE = 1234 - http = object() - stream = _Stream() - upload = self._make_one(stream, total_size=SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_media_body(SIZE) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), 0) - self.assertEqual(request.headers, - {'content-length': '0', # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes */%d' % (SIZE,)}) - self.assertEqual(end, SIZE) - - def test__send_chunk_not_initialized(self): - from google.cloud.streaming.exceptions import TransferInvalidError - - upload = self._make_one(_Stream()) - with self.assertRaises(TransferInvalidError): - upload._send_chunk(0) - - def test__send_chunk_wo_total_size_stream_exhausted(self): - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - http = object() - upload = self._make_one(_Stream(CONTENT), chunksize=1000) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - self.assertIsNone(upload.total_size) - - found = upload._send_chunk(0) - - self.assertIs(found, response) - self.assertEqual(upload.total_size, SIZE) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.body, CONTENT) - self.assertEqual(request.headers, - {'content-length': '%d' % SIZE, # speling! 
- 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (SIZE - 1, SIZE)}) - self.assertEqual(end, SIZE) - - def test__send_chunk_wo_total_size_stream_not_exhausted(self): - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = SIZE - 5 - http = object() - upload = self._make_one(_Stream(CONTENT), chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - self.assertIsNone(upload.total_size) - - found = upload._send_chunk(0) - - self.assertIs(found, response) - self.assertIsNone(upload.total_size) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - self.assertEqual(request.body, CONTENT[:CHUNK_SIZE]) - expected_headers = { - 'content-length': '%d' % CHUNK_SIZE, # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/*' % (CHUNK_SIZE - 1,), - } - self.assertEqual(request.headers, expected_headers) - self.assertEqual(end, CHUNK_SIZE) - - def test__send_chunk_w_total_size_stream_not_exhausted(self): - from google.cloud.streaming.stream_slice import StreamSlice - - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = SIZE - 5 - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, total_size=SIZE, chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_chunk(0) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), CHUNK_SIZE) - expected_headers = { - 'content-length': '%d' % CHUNK_SIZE, # speling! - 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes 0-%d/%d' % (CHUNK_SIZE - 1, SIZE), - } - self.assertEqual(request.headers, expected_headers) - self.assertEqual(end, CHUNK_SIZE) - - def test__send_chunk_w_total_size_stream_exhausted(self): - from google.cloud.streaming.stream_slice import StreamSlice - - CONTENT = b'ABCDEFGHIJ' - SIZE = len(CONTENT) - CHUNK_SIZE = 1000 - http = object() - stream = _Stream(CONTENT) - upload = self._make_one(stream, total_size=SIZE, chunksize=CHUNK_SIZE) - upload._initialize(http, self.UPLOAD_URL) - response = object() - streamer = _MediaStreamer(response) - upload._send_media_request = streamer - - found = upload._send_chunk(SIZE) - - self.assertIs(found, response) - request, end = streamer._called_with - self.assertEqual(request.url, self.UPLOAD_URL) - self.assertEqual(request.http_method, 'PUT') - body_stream = request.body - self.assertIsInstance(body_stream, StreamSlice) - self.assertIs(body_stream._stream, stream) - self.assertEqual(len(body_stream), 0) - self.assertEqual(request.headers, - {'content-length': '0', # speling! 
- 'Content-Type': self.MIME_TYPE, - 'Content-Range': 'bytes */%d' % (SIZE,)}) - self.assertEqual(end, SIZE) - - -def _email_chunk_parser(): - import six - - if six.PY3: # pragma: NO COVER Python3 - from email.parser import BytesParser - - parser = BytesParser() - return parser.parsebytes - else: - from email.parser import Parser - - parser = Parser() - return parser.parsestr - - -class _Dummy(object): - def __init__(self, **kw): - self.__dict__.update(kw) - - -class _UploadConfig(object): - accept = ('*/*',) - max_size = None - resumable_path = '/resumable/endpoint' - simple_multipart = True - simple_path = '/upload/endpoint' - - -class _Stream(object): - _closed = False - - def __init__(self, to_read=b''): - import io - - self._written = [] - self._to_read = io.BytesIO(to_read) - - def write(self, to_write): - self._written.append(to_write) - - def seek(self, offset, whence=0): - self._to_read.seek(offset, whence) - - def read(self, size=None): - if size is not None: - return self._to_read.read(size) - return self._to_read.read() - - def tell(self): - return self._to_read.tell() - - def close(self): - self._closed = True - - -class _StreamWithSeekableMethod(_Stream): - - def __init__(self, to_read=b'', seekable=True): - super(_StreamWithSeekableMethod, self).__init__(to_read) - self._seekable = seekable - - def seekable(self): - return self._seekable - - -class _Request(object): - __slots__ = ('url', 'http_method', 'body', 'headers', 'loggable_body') - URL = 'http://example.com/api' - - def __init__(self, url=URL, http_method='GET', body='', headers=None): - self.url = url - self.http_method = http_method - self.body = self.loggable_body = body - if headers is None: - headers = {} - self.headers = headers - - -class _MakeRequest(object): - - def __init__(self, *responses): - self._responses = list(responses) - self._requested = [] - - def __call__(self, http, request, **kw): - self._requested.append((request, http, kw)) - return self._responses.pop(0) - - -def _makeResponse(status_code, info=None, content='', - request_url=_Request.URL): - if info is None: - info = {} - return _Dummy(status_code=status_code, - info=info, - content=content, - length=len(content), - request_url=request_url) - - -class _MediaStreamer(object): - - _called_with = None - - def __init__(self, response): - self._response = response - - def __call__(self, request, end): - assert self._called_with is None - self._called_with = (request, end) - return self._response diff --git a/core/tests/unit/streaming/test_util.py b/core/tests/unit/streaming/test_util.py deleted file mode 100644 index 4da788182cb9..000000000000 --- a/core/tests/unit/streaming/test_util.py +++ /dev/null @@ -1,66 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
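The deleted test_util.py just below fixes two data points for calculate_wait_for_retry: attempt 1 with jitter forced to its lower bound yields 1.5, and attempt 4 forced to its upper bound yields 20. A sketch consistent with those values, assuming a 2 ** attempt base with jitter of a quarter of the base in either direction; the ceiling and its default are assumptions the tests do not exercise:

    import random

    def calculate_wait_for_retry(retry_attempt, max_wait=30):
        # Base wait doubles per attempt: 2, 4, 8, 16, ...
        wait_time = 2 ** retry_attempt
        # Jitter by up to a quarter of the base in either direction:
        # attempt 1 at the lower bound gives 2 - 0.5 == 1.5;
        # attempt 4 at the upper bound gives 16 + 4 == 20.
        max_jitter = wait_time / 4.0
        wait_time += random.uniform(-max_jitter, max_jitter)
        # Clamp to the assumed ceiling.
        return min(wait_time, max_wait)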
- -import unittest - - -class Test_calculate_wait_for_retry(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.util import calculate_wait_for_retry - - return calculate_wait_for_retry(*args, **kw) - - def test_w_negative_jitter_lt_max_wait(self): - import random - from google.cloud._testing import _Monkey - - with _Monkey(random, uniform=lambda lower, upper: lower): - self.assertEqual(self._call_fut(1), 1.5) - - def test_w_positive_jitter_gt_max_wait(self): - import random - from google.cloud._testing import _Monkey - - with _Monkey(random, uniform=lambda lower, upper: upper): - self.assertEqual(self._call_fut(4), 20) - - -class Test_acceptable_mime_type(unittest.TestCase): - - def _call_fut(self, *args, **kw): - from google.cloud.streaming.util import acceptable_mime_type - - return acceptable_mime_type(*args, **kw) - - def test_pattern_wo_slash(self): - with self.assertRaises(ValueError) as err: - self._call_fut(['text/*'], 'BOGUS') - self.assertEqual( - err.exception.args, - ('Invalid MIME type: "BOGUS"',)) - - def test_accept_pattern_w_semicolon(self): - with self.assertRaises(ValueError) as err: - self._call_fut(['text/*;charset=utf-8'], 'text/plain') - self.assertEqual( - err.exception.args, - ('MIME patterns with parameter unsupported: ' - '"text/*;charset=utf-8"',)) - - def test_miss(self): - self.assertFalse(self._call_fut(['image/*'], 'text/plain')) - - def test_hit(self): - self.assertTrue(self._call_fut(['text/*'], 'text/plain')) diff --git a/setup.cfg b/setup.cfg index 2a9acf13daa9..79874b747939 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,5 @@ [bdist_wheel] universal = 1 + +[tool:pytest] +addopts = --tb=native From d52d547c498eea65406089ee7711a867ef880681 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Sat, 22 Jul 2017 16:38:30 -0700 Subject: [PATCH 44/62] Fix pylint for the main package --- core/.flake8 | 1 + core/google/__init__.py | 2 ++ core/google/cloud/__init__.py | 2 ++ core/google/cloud/_helpers.py | 5 +++-- core/google/cloud/_http.py | 4 +++- core/google/cloud/_testing.py | 14 +++++++++----- core/google/cloud/client.py | 2 +- core/google/cloud/future/operation.py | 2 +- core/google/cloud/iam.py | 6 +++--- core/google/cloud/iterator.py | 5 ++++- core/google/cloud/operation.py | 4 ++-- core/nox.py | 3 ++- 12 files changed, 33 insertions(+), 17 deletions(-) diff --git a/core/.flake8 b/core/.flake8 index 25168dc87605..7f4ddb8072b0 100644 --- a/core/.flake8 +++ b/core/.flake8 @@ -1,4 +1,5 @@ [flake8] +import-order-style=google exclude = __pycache__, .git, diff --git a/core/google/__init__.py b/core/google/__init__.py index b2b833373882..a35569c36339 100644 --- a/core/google/__init__.py +++ b/core/google/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Google namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/core/google/cloud/__init__.py b/core/google/cloud/__init__.py index b2b833373882..59a804265f5c 100644 --- a/core/google/cloud/__init__.py +++ b/core/google/cloud/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+"""Google Cloud namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/core/google/cloud/_helpers.py b/core/google/cloud/_helpers.py index 72918e064507..8dc9bf1cf412 100644 --- a/core/google/cloud/_helpers.py +++ b/core/google/cloud/_helpers.py @@ -17,7 +17,6 @@ This module is not part of the public API surface. """ -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import import calendar @@ -32,6 +31,8 @@ import google_auth_httplib2 try: + # pylint: disable=ungrouped-imports + # We must import google.auth.transport.grpc within this try: catch. import grpc import google.auth.transport.grpc except ImportError: # pragma: NO COVER @@ -104,7 +105,7 @@ def top(self): :rtype: object :returns: the top-most item, or None if the stack is empty. """ - if len(self._stack) > 0: + if self._stack: return self._stack[-1] diff --git a/core/google/cloud/_http.py b/core/google/cloud/_http.py index e1a481e581a7..ada60b4fb2c3 100644 --- a/core/google/cloud/_http.py +++ b/core/google/cloud/_http.py @@ -279,7 +279,9 @@ def api_request(self, method, path, query_params=None, can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. - :raises: Exception if the response code is not 200 OK. + :raises ~google.cloud.exceptions.GoogleCloudError: if the response code + is not 200 OK. + :raises TypeError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. diff --git a/core/google/cloud/_testing.py b/core/google/cloud/_testing.py index a544fffc5fe4..871b5f631bc7 100644 --- a/core/google/cloud/_testing.py +++ b/core/google/cloud/_testing.py @@ -14,17 +14,15 @@ """Shared testing utilities.""" - -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import class _Monkey(object): - # context-manager for replacing module names in the scope of a test. + """Context-manager for replacing module names in the scope of a test.""" def __init__(self, module, **kw): self.module = module - if len(kw) == 0: # pragma: NO COVER + if not kw: # pragma: NO COVER raise ValueError('_Monkey was used with nothing to monkey-patch') self.to_restore = {key: getattr(module, key) for key in kw} for key, value in kw.items(): @@ -68,8 +66,12 @@ def _tempdir_mgr(): return _tempdir_mgr +# pylint: disable=invalid-name +# Retain _tempdir as a constant for backwards compatibility despite +# being an invalid name. 
_tempdir = _tempdir_maker() del _tempdir_maker +# pylint: enable=invalid-name class _GAXBaseAPI(object): @@ -79,7 +81,8 @@ class _GAXBaseAPI(object): def __init__(self, **kw): self.__dict__.update(kw) - def _make_grpc_error(self, status_code, trailing=None): + @staticmethod + def _make_grpc_error(status_code, trailing=None): from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous @@ -111,6 +114,7 @@ def __init__(self, *pages, **kwargs): self.page_token = kwargs.get('page_token') def next(self): + """Iterate to the next page.""" import six return six.next(self._pages) diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py index 9bdbf507d201..5fa7f7ef95a2 100644 --- a/core/google/cloud/client.py +++ b/core/google/cloud/client.py @@ -64,7 +64,7 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. - :raises: :class:`TypeError` if there is a conflict with the kwargs + :raises TypeError: if there is a conflict with the kwargs and the credentials created by the factory. """ if 'credentials' in kwargs: diff --git a/core/google/cloud/future/operation.py b/core/google/cloud/future/operation.py index 5bbfda1a8f0b..8064e5c13e1f 100644 --- a/core/google/cloud/future/operation.py +++ b/core/google/cloud/future/operation.py @@ -34,7 +34,7 @@ class Operation(base.PollingFuture): initial operation. refresh (Callable[[], Operation]): A callable that returns the latest state of the operation. - cancel (Callable[[], None]), A callable that tries to cancel + cancel (Callable[[], None]): A callable that tries to cancel the operation. result_type (type): The protobuf type for the operation's result. metadata_type (type): The protobuf type for the operation's diff --git a/core/google/cloud/iam.py b/core/google/cloud/iam.py index 49bb11266cee..bbc31c047a85 100644 --- a/core/google/cloud/iam.py +++ b/core/google/cloud/iam.py @@ -226,14 +226,14 @@ def to_api_repr(self): if self.version is not None: resource['version'] = self.version - if len(self._bindings) > 0: + if self._bindings: bindings = resource['bindings'] = [] for role, members in sorted(self._bindings.items()): - if len(members) > 0: + if members: bindings.append( {'role': role, 'members': sorted(set(members))}) - if len(bindings) == 0: + if not bindings: del resource['bindings'] return resource diff --git a/core/google/cloud/iterator.py b/core/google/cloud/iterator.py index 7bb708e90f09..742443ddc5f9 100644 --- a/core/google/cloud/iterator.py +++ b/core/google/cloud/iterator.py @@ -242,7 +242,8 @@ def _page_iter(self, increment): results per page while an items iterator will want to increment per item. - Yields :class:`Page` instances. + :rtype: :class:`Page` + :returns: pages """ page = self._next_page() while page is not None: @@ -387,6 +388,8 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. + + :raises ValueError: If the HTTP method is not ``GET`` or ``POST``. """ params = self._get_query_params() if self._HTTP_METHOD == 'GET': diff --git a/core/google/cloud/operation.py b/core/google/cloud/operation.py index 4e700a553e4f..9f53c595f658 100644 --- a/core/google/cloud/operation.py +++ b/core/google/cloud/operation.py @@ -50,7 +50,7 @@ def register_type(klass, type_url=None): :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. 
- :raises: ValueError if a registration already exists for the URL. + :raises ValueError: if a registration already exists for the URL. """ if type_url is None: type_url = _compute_type_url(klass) @@ -258,7 +258,7 @@ def poll(self): :rtype: bool :returns: A boolean indicating if the current operation has completed. - :raises: :class:`~exceptions.ValueError` if the operation + :raises ValueError: if the operation has already completed. """ if self.complete: diff --git a/core/nox.py b/core/nox.py index c8f4a942e7a2..8f025cce8b61 100644 --- a/core/nox.py +++ b/core/nox.py @@ -50,7 +50,8 @@ def lint(session): serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', 'pylint', 'gcp-devrel-py-tools') + session.install( + 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') session.install('.') session.run('flake8', 'google/cloud/core') session.run( From 216c995beb9b4bec48743200222d9acdac6a4f5d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Sat, 22 Jul 2017 16:39:52 -0700 Subject: [PATCH 45/62] Revert "Fix pylint for the main package" - accidental push This reverts commit d0479d6beb082646f85cafa54a5659fcd9bdbebd. --- core/.flake8 | 1 - core/google/__init__.py | 2 -- core/google/cloud/__init__.py | 2 -- core/google/cloud/_helpers.py | 5 ++--- core/google/cloud/_http.py | 4 +--- core/google/cloud/_testing.py | 14 +++++--------- core/google/cloud/client.py | 2 +- core/google/cloud/future/operation.py | 2 +- core/google/cloud/iam.py | 6 +++--- core/google/cloud/iterator.py | 5 +---- core/google/cloud/operation.py | 4 ++-- core/nox.py | 3 +-- 12 files changed, 17 insertions(+), 33 deletions(-) diff --git a/core/.flake8 b/core/.flake8 index 7f4ddb8072b0..25168dc87605 100644 --- a/core/.flake8 +++ b/core/.flake8 @@ -1,5 +1,4 @@ [flake8] -import-order-style=google exclude = __pycache__, .git, diff --git a/core/google/__init__.py b/core/google/__init__.py index a35569c36339..b2b833373882 100644 --- a/core/google/__init__.py +++ b/core/google/__init__.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Google namespace package.""" - try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/core/google/cloud/__init__.py b/core/google/cloud/__init__.py index 59a804265f5c..b2b833373882 100644 --- a/core/google/cloud/__init__.py +++ b/core/google/cloud/__init__.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Google Cloud namespace package.""" - try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/core/google/cloud/_helpers.py b/core/google/cloud/_helpers.py index 8dc9bf1cf412..72918e064507 100644 --- a/core/google/cloud/_helpers.py +++ b/core/google/cloud/_helpers.py @@ -17,6 +17,7 @@ This module is not part of the public API surface. """ +# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import import calendar @@ -31,8 +32,6 @@ import google_auth_httplib2 try: - # pylint: disable=ungrouped-imports - # We must import google.auth.transport.grpc within this try: catch. import grpc import google.auth.transport.grpc except ImportError: # pragma: NO COVER @@ -105,7 +104,7 @@ def top(self): :rtype: object :returns: the top-most item, or None if the stack is empty. 
""" - if self._stack: + if len(self._stack) > 0: return self._stack[-1] diff --git a/core/google/cloud/_http.py b/core/google/cloud/_http.py index ada60b4fb2c3..e1a481e581a7 100644 --- a/core/google/cloud/_http.py +++ b/core/google/cloud/_http.py @@ -279,9 +279,7 @@ def api_request(self, method, path, query_params=None, can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. - :raises ~google.cloud.exceptions.GoogleCloudError: if the response code - is not 200 OK. - :raises TypeError: if the response content type is not JSON. + :raises: Exception if the response code is not 200 OK. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. diff --git a/core/google/cloud/_testing.py b/core/google/cloud/_testing.py index 871b5f631bc7..a544fffc5fe4 100644 --- a/core/google/cloud/_testing.py +++ b/core/google/cloud/_testing.py @@ -14,15 +14,17 @@ """Shared testing utilities.""" + +# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import class _Monkey(object): - """Context-manager for replacing module names in the scope of a test.""" + # context-manager for replacing module names in the scope of a test. def __init__(self, module, **kw): self.module = module - if not kw: # pragma: NO COVER + if len(kw) == 0: # pragma: NO COVER raise ValueError('_Monkey was used with nothing to monkey-patch') self.to_restore = {key: getattr(module, key) for key in kw} for key, value in kw.items(): @@ -66,12 +68,8 @@ def _tempdir_mgr(): return _tempdir_mgr -# pylint: disable=invalid-name -# Retain _tempdir as a constant for backwards compatibility despite -# being an invalid name. _tempdir = _tempdir_maker() del _tempdir_maker -# pylint: enable=invalid-name class _GAXBaseAPI(object): @@ -81,8 +79,7 @@ class _GAXBaseAPI(object): def __init__(self, **kw): self.__dict__.update(kw) - @staticmethod - def _make_grpc_error(status_code, trailing=None): + def _make_grpc_error(self, status_code, trailing=None): from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous @@ -114,7 +111,6 @@ def __init__(self, *pages, **kwargs): self.page_token = kwargs.get('page_token') def next(self): - """Iterate to the next page.""" import six return six.next(self._pages) diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py index 5fa7f7ef95a2..9bdbf507d201 100644 --- a/core/google/cloud/client.py +++ b/core/google/cloud/client.py @@ -64,7 +64,7 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. - :raises TypeError: if there is a conflict with the kwargs + :raises: :class:`TypeError` if there is a conflict with the kwargs and the credentials created by the factory. """ if 'credentials' in kwargs: diff --git a/core/google/cloud/future/operation.py b/core/google/cloud/future/operation.py index 8064e5c13e1f..5bbfda1a8f0b 100644 --- a/core/google/cloud/future/operation.py +++ b/core/google/cloud/future/operation.py @@ -34,7 +34,7 @@ class Operation(base.PollingFuture): initial operation. refresh (Callable[[], Operation]): A callable that returns the latest state of the operation. - cancel (Callable[[], None]): A callable that tries to cancel + cancel (Callable[[], None]), A callable that tries to cancel the operation. result_type (type): The protobuf type for the operation's result. 
metadata_type (type): The protobuf type for the operation's diff --git a/core/google/cloud/iam.py b/core/google/cloud/iam.py index bbc31c047a85..49bb11266cee 100644 --- a/core/google/cloud/iam.py +++ b/core/google/cloud/iam.py @@ -226,14 +226,14 @@ def to_api_repr(self): if self.version is not None: resource['version'] = self.version - if self._bindings: + if len(self._bindings) > 0: bindings = resource['bindings'] = [] for role, members in sorted(self._bindings.items()): - if members: + if len(members) > 0: bindings.append( {'role': role, 'members': sorted(set(members))}) - if not bindings: + if len(bindings) == 0: del resource['bindings'] return resource diff --git a/core/google/cloud/iterator.py b/core/google/cloud/iterator.py index 742443ddc5f9..7bb708e90f09 100644 --- a/core/google/cloud/iterator.py +++ b/core/google/cloud/iterator.py @@ -242,8 +242,7 @@ def _page_iter(self, increment): results per page while an items iterator will want to increment per item. - :rtype: :class:`Page` - :returns: pages + Yields :class:`Page` instances. """ page = self._next_page() while page is not None: @@ -388,8 +387,6 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. - - :raises ValueError: If the HTTP method is not ``GET`` or ``POST``. """ params = self._get_query_params() if self._HTTP_METHOD == 'GET': diff --git a/core/google/cloud/operation.py b/core/google/cloud/operation.py index 9f53c595f658..4e700a553e4f 100644 --- a/core/google/cloud/operation.py +++ b/core/google/cloud/operation.py @@ -50,7 +50,7 @@ def register_type(klass, type_url=None): :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. - :raises ValueError: if a registration already exists for the URL. + :raises: ValueError if a registration already exists for the URL. """ if type_url is None: type_url = _compute_type_url(klass) @@ -258,7 +258,7 @@ def poll(self): :rtype: bool :returns: A boolean indicating if the current operation has completed. - :raises ValueError: if the operation + :raises: :class:`~exceptions.ValueError` if the operation has already completed. """ if self.complete: diff --git a/core/nox.py b/core/nox.py index 8f025cce8b61..c8f4a942e7a2 100644 --- a/core/nox.py +++ b/core/nox.py @@ -50,8 +50,7 @@ def lint(session): serious code quality issues. 
""" session.interpreter = 'python3.6' - session.install( - 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') + session.install('flake8', 'pylint', 'gcp-devrel-py-tools') session.install('.') session.run('flake8', 'google/cloud/core') session.run( From 04a1e6dd9d278588d0a3b748e74c110d4765de45 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Jul 2017 14:08:55 -0700 Subject: [PATCH 46/62] Fix pylint for the main package (#3658) --- core/.flake8 | 1 + core/google/__init__.py | 2 ++ core/google/cloud/__init__.py | 2 ++ core/google/cloud/_helpers.py | 3 +-- core/google/cloud/_http.py | 4 +++- core/google/cloud/_testing.py | 14 +++++++++----- core/google/cloud/client.py | 2 +- core/google/cloud/future/operation.py | 2 +- core/google/cloud/iam.py | 6 +++--- core/google/cloud/iterator.py | 5 ++++- core/google/cloud/operation.py | 4 ++-- core/nox.py | 3 ++- 12 files changed, 31 insertions(+), 17 deletions(-) diff --git a/core/.flake8 b/core/.flake8 index 25168dc87605..7f4ddb8072b0 100644 --- a/core/.flake8 +++ b/core/.flake8 @@ -1,4 +1,5 @@ [flake8] +import-order-style=google exclude = __pycache__, .git, diff --git a/core/google/__init__.py b/core/google/__init__.py index b2b833373882..a35569c36339 100644 --- a/core/google/__init__.py +++ b/core/google/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Google namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/core/google/cloud/__init__.py b/core/google/cloud/__init__.py index b2b833373882..59a804265f5c 100644 --- a/core/google/cloud/__init__.py +++ b/core/google/cloud/__init__.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Google Cloud namespace package.""" + try: import pkg_resources pkg_resources.declare_namespace(__name__) diff --git a/core/google/cloud/_helpers.py b/core/google/cloud/_helpers.py index 72918e064507..62bbccf74b15 100644 --- a/core/google/cloud/_helpers.py +++ b/core/google/cloud/_helpers.py @@ -17,7 +17,6 @@ This module is not part of the public API surface. """ -# Avoid the grpc and google.cloud.grpc collision. from __future__ import absolute_import import calendar @@ -104,7 +103,7 @@ def top(self): :rtype: object :returns: the top-most item, or None if the stack is empty. """ - if len(self._stack) > 0: + if self._stack: return self._stack[-1] diff --git a/core/google/cloud/_http.py b/core/google/cloud/_http.py index e1a481e581a7..ada60b4fb2c3 100644 --- a/core/google/cloud/_http.py +++ b/core/google/cloud/_http.py @@ -279,7 +279,9 @@ def api_request(self, method, path, query_params=None, can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. - :raises: Exception if the response code is not 200 OK. + :raises ~google.cloud.exceptions.GoogleCloudError: if the response code + is not 200 OK. + :raises TypeError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. diff --git a/core/google/cloud/_testing.py b/core/google/cloud/_testing.py index a544fffc5fe4..871b5f631bc7 100644 --- a/core/google/cloud/_testing.py +++ b/core/google/cloud/_testing.py @@ -14,17 +14,15 @@ """Shared testing utilities.""" - -# Avoid the grpc and google.cloud.grpc collision. 
from __future__ import absolute_import class _Monkey(object): - # context-manager for replacing module names in the scope of a test. + """Context-manager for replacing module names in the scope of a test.""" def __init__(self, module, **kw): self.module = module - if len(kw) == 0: # pragma: NO COVER + if not kw: # pragma: NO COVER raise ValueError('_Monkey was used with nothing to monkey-patch') self.to_restore = {key: getattr(module, key) for key in kw} for key, value in kw.items(): @@ -68,8 +66,12 @@ def _tempdir_mgr(): return _tempdir_mgr +# pylint: disable=invalid-name +# Retain _tempdir as a constant for backwards compatibility despite +# being an invalid name. _tempdir = _tempdir_maker() del _tempdir_maker +# pylint: enable=invalid-name class _GAXBaseAPI(object): @@ -79,7 +81,8 @@ class _GAXBaseAPI(object): def __init__(self, **kw): self.__dict__.update(kw) - def _make_grpc_error(self, status_code, trailing=None): + @staticmethod + def _make_grpc_error(status_code, trailing=None): from grpc._channel import _RPCState from google.cloud.exceptions import GrpcRendezvous @@ -111,6 +114,7 @@ def __init__(self, *pages, **kwargs): self.page_token = kwargs.get('page_token') def next(self): + """Iterate to the next page.""" import six return six.next(self._pages) diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py index 9bdbf507d201..5fa7f7ef95a2 100644 --- a/core/google/cloud/client.py +++ b/core/google/cloud/client.py @@ -64,7 +64,7 @@ def from_service_account_json(cls, json_credentials_path, *args, **kwargs): :rtype: :class:`_ClientFactoryMixin` :returns: The client created with the retrieved JSON credentials. - :raises: :class:`TypeError` if there is a conflict with the kwargs + :raises TypeError: if there is a conflict with the kwargs and the credentials created by the factory. """ if 'credentials' in kwargs: diff --git a/core/google/cloud/future/operation.py b/core/google/cloud/future/operation.py index 5bbfda1a8f0b..8064e5c13e1f 100644 --- a/core/google/cloud/future/operation.py +++ b/core/google/cloud/future/operation.py @@ -34,7 +34,7 @@ class Operation(base.PollingFuture): initial operation. refresh (Callable[[], Operation]): A callable that returns the latest state of the operation. - cancel (Callable[[], None]), A callable that tries to cancel + cancel (Callable[[], None]): A callable that tries to cancel the operation. result_type (type): The protobuf type for the operation's result. metadata_type (type): The protobuf type for the operation's diff --git a/core/google/cloud/iam.py b/core/google/cloud/iam.py index 49bb11266cee..bbc31c047a85 100644 --- a/core/google/cloud/iam.py +++ b/core/google/cloud/iam.py @@ -226,14 +226,14 @@ def to_api_repr(self): if self.version is not None: resource['version'] = self.version - if len(self._bindings) > 0: + if self._bindings: bindings = resource['bindings'] = [] for role, members in sorted(self._bindings.items()): - if len(members) > 0: + if members: bindings.append( {'role': role, 'members': sorted(set(members))}) - if len(bindings) == 0: + if not bindings: del resource['bindings'] return resource diff --git a/core/google/cloud/iterator.py b/core/google/cloud/iterator.py index 7bb708e90f09..742443ddc5f9 100644 --- a/core/google/cloud/iterator.py +++ b/core/google/cloud/iterator.py @@ -242,7 +242,8 @@ def _page_iter(self, increment): results per page while an items iterator will want to increment per item. - Yields :class:`Page` instances. 
+ :rtype: :class:`Page` + :returns: pages """ page = self._next_page() while page is not None: @@ -387,6 +388,8 @@ def _get_next_page_response(self): :rtype: dict :returns: The parsed JSON response of the next page's contents. + + :raises ValueError: If the HTTP method is not ``GET`` or ``POST``. """ params = self._get_query_params() if self._HTTP_METHOD == 'GET': diff --git a/core/google/cloud/operation.py b/core/google/cloud/operation.py index 4e700a553e4f..9f53c595f658 100644 --- a/core/google/cloud/operation.py +++ b/core/google/cloud/operation.py @@ -50,7 +50,7 @@ def register_type(klass, type_url=None): :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. - :raises: ValueError if a registration already exists for the URL. + :raises ValueError: if a registration already exists for the URL. """ if type_url is None: type_url = _compute_type_url(klass) @@ -258,7 +258,7 @@ def poll(self): :rtype: bool :returns: A boolean indicating if the current operation has completed. - :raises: :class:`~exceptions.ValueError` if the operation + :raises ValueError: if the operation has already completed. """ if self.complete: diff --git a/core/nox.py b/core/nox.py index c8f4a942e7a2..8f025cce8b61 100644 --- a/core/nox.py +++ b/core/nox.py @@ -50,7 +50,8 @@ def lint(session): serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', 'pylint', 'gcp-devrel-py-tools') + session.install( + 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') session.install('.') session.run('flake8', 'google/cloud/core') session.run( From 6ebb916ccc3b20c2dab32d4106a28219f379e5fd Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Jul 2017 14:17:14 -0700 Subject: [PATCH 47/62] Remove QueryJob.results() (#3661) --- .../google/cloud/bigquery/dbapi/_helpers.py | 21 ----------- .../google/cloud/bigquery/dbapi/cursor.py | 10 +++-- bigquery/google/cloud/bigquery/job.py | 16 -------- bigquery/tests/unit/test_dbapi__helpers.py | 37 ------------------- bigquery/tests/unit/test_dbapi_cursor.py | 20 +++++++++- bigquery/tests/unit/test_job.py | 11 ------ 6 files changed, 25 insertions(+), 90 deletions(-) diff --git a/bigquery/google/cloud/bigquery/dbapi/_helpers.py b/bigquery/google/cloud/bigquery/dbapi/_helpers.py index 1a9a02fd7cc7..a9a358cbf0f5 100644 --- a/bigquery/google/cloud/bigquery/dbapi/_helpers.py +++ b/bigquery/google/cloud/bigquery/dbapi/_helpers.py @@ -15,7 +15,6 @@ import collections import datetime import numbers -import time import six @@ -23,26 +22,6 @@ from google.cloud.bigquery.dbapi import exceptions -def wait_for_job(job): - """Waits for a job to complete by polling until the state is `DONE`. - - Sleeps 1 second between calls to the BigQuery API. - - :type job: :class:`~google.cloud.bigquery.job._AsyncJob` - :param job: Wait for this job to finish. - - :raises: :class:`~google.cloud.bigquery.dbapi.exceptions.DatabaseError` - if the job fails. - """ - while True: - job.reload() - if job.state == 'DONE': - if job.error_result: - raise exceptions.DatabaseError(job.errors) - return - time.sleep(1) - - def scalar_to_query_parameter(value, name=None): """Convert a scalar value into a query parameter. 
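The surviving helper shown above, `scalar_to_query_parameter`, infers a
BigQuery parameter type from the Python value it is given. A usage sketch
with made-up parameter names, assuming integers map to INT64 and text to
STRING:

from google.cloud.bigquery.dbapi import _helpers

num_param = _helpers.scalar_to_query_parameter(123, name='num')
label_param = _helpers.scalar_to_query_parameter(u'hello', name='label')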
diff --git a/bigquery/google/cloud/bigquery/dbapi/cursor.py b/bigquery/google/cloud/bigquery/dbapi/cursor.py index bcbb19cfd066..7519c762ae1e 100644 --- a/bigquery/google/cloud/bigquery/dbapi/cursor.py +++ b/bigquery/google/cloud/bigquery/dbapi/cursor.py @@ -21,7 +21,7 @@ from google.cloud.bigquery.dbapi import _helpers from google.cloud.bigquery.dbapi import exceptions - +import google.cloud.exceptions # Per PEP 249: A 7-item sequence containing information describing one result # column. The first two items (name and type_code) are mandatory, the other @@ -148,9 +148,11 @@ def execute(self, operation, parameters=None): formatted_operation, query_parameters=query_parameters) query_job.use_legacy_sql = False - query_job.begin() - _helpers.wait_for_job(query_job) - query_results = query_job.results() + + try: + query_results = query_job.result() + except google.cloud.exceptions.GoogleCloudError: + raise exceptions.DatabaseError(query_job.errors) # Force the iterator to run because the query_results doesn't # have the total_rows populated. See: diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index 35a423b755b9..3e6a9f93418b 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -16,7 +16,6 @@ import collections import threading -import warnings import six from six.moves import http_client @@ -1264,21 +1263,6 @@ def query_results(self): from google.cloud.bigquery.query import QueryResults return QueryResults.from_query_job(self) - def results(self): - """DEPRECATED. - - This method is deprecated. Use :meth:`query_results` or :meth:`result`. - - Construct a QueryResults instance, bound to this job. - - :rtype: :class:`~google.cloud.bigquery.query.QueryResults` - :returns: The query results. - """ - warnings.warn( - 'QueryJob.results() is deprecated. Please use query_results() or ' - 'result().', DeprecationWarning) - return self.query_results() - def result(self, timeout=None): """Start the job and wait for it to complete and get the result. 
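The cursor change above replaces the hand-rolled `wait_for_job` polling
with `QueryJob.result()`, which starts the job if necessary, blocks until
it finishes, and raises a `GoogleCloudError` on failure. A condensed
sketch of the new pattern (the job name and query are arbitrary):

import google.cloud.exceptions
from google.cloud.bigquery import Client
from google.cloud.bigquery.dbapi import exceptions

client = Client()
query_job = client.run_async_query('my-job-id', 'SELECT 1')
query_job.use_legacy_sql = False

try:
    # Blocks until the job completes.
    query_results = query_job.result()
except google.cloud.exceptions.GoogleCloudError:
    # Surface the failure through the DB-API exception hierarchy.
    raise exceptions.DatabaseError(query_job.errors)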
diff --git a/bigquery/tests/unit/test_dbapi__helpers.py b/bigquery/tests/unit/test_dbapi__helpers.py index e030ed49df0c..48bca5ae9a59 100644 --- a/bigquery/tests/unit/test_dbapi__helpers.py +++ b/bigquery/tests/unit/test_dbapi__helpers.py @@ -16,48 +16,11 @@ import math import unittest -import mock - import google.cloud._helpers from google.cloud.bigquery.dbapi import _helpers from google.cloud.bigquery.dbapi import exceptions -class Test_wait_for_job(unittest.TestCase): - - def _mock_job(self): - from google.cloud.bigquery import job - mock_job = mock.create_autospec(job.QueryJob) - mock_job.state = 'RUNNING' - mock_job._mocked_iterations = 0 - - def mock_reload(): - mock_job._mocked_iterations += 1 - if mock_job._mocked_iterations >= 2: - mock_job.state = 'DONE' - - mock_job.reload.side_effect = mock_reload - return mock_job - - def _call_fut(self, job): - from google.cloud.bigquery.dbapi._helpers import wait_for_job - with mock.patch('time.sleep'): - wait_for_job(job) - - def test_wo_error(self): - mock_job = self._mock_job() - mock_job.error_result = None - self._call_fut(mock_job) - self.assertEqual('DONE', mock_job.state) - - def test_w_error(self): - from google.cloud.bigquery.dbapi import exceptions - mock_job = self._mock_job() - mock_job.error_result = {'reason': 'invalidQuery'} - self.assertRaises(exceptions.DatabaseError, self._call_fut, mock_job) - self.assertEqual('DONE', mock_job.state) - - class TestQueryParameters(unittest.TestCase): def test_scalar_to_query_parameter(self): diff --git a/bigquery/tests/unit/test_dbapi_cursor.py b/bigquery/tests/unit/test_dbapi_cursor.py index 9671a27b8f8f..2a2ccfd989a6 100644 --- a/bigquery/tests/unit/test_dbapi_cursor.py +++ b/bigquery/tests/unit/test_dbapi_cursor.py @@ -42,7 +42,7 @@ def _mock_job( mock_job = mock.create_autospec(job.QueryJob) mock_job.error_result = None mock_job.state = 'DONE' - mock_job.results.return_value = self._mock_results( + mock_job.result.return_value = self._mock_results( rows=rows, schema=schema, num_dml_affected_rows=num_dml_affected_rows) return mock_job @@ -219,6 +219,24 @@ def test_execute_w_query(self): row = cursor.fetchone() self.assertIsNone(row) + def test_execute_raises_if_result_raises(self): + import google.cloud.exceptions + + from google.cloud.bigquery import client + from google.cloud.bigquery import job + from google.cloud.bigquery.dbapi import connect + from google.cloud.bigquery.dbapi import exceptions + + job = mock.create_autospec(job.QueryJob) + job.result.side_effect = google.cloud.exceptions.GoogleCloudError('') + client = mock.create_autospec(client.Client) + client.run_async_query.return_value = job + connection = connect(client) + cursor = connection.cursor() + + with self.assertRaises(exceptions.DatabaseError): + cursor.execute('SELECT 1') + def test_executemany_w_dml(self): from google.cloud.bigquery.dbapi import connect connection = connect( diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index 8b9d079df148..fcb518d9c502 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -13,7 +13,6 @@ # limitations under the License. 
import copy -import warnings from six.moves import http_client import unittest @@ -1560,16 +1559,6 @@ def test_query_results(self): self.assertIsInstance(results, QueryResults) self.assertIs(results._job, job) - def test_results_is_deprecated(self): - client = _Client(self.PROJECT) - job = self._make_one(self.JOB_NAME, self.QUERY, client) - - with warnings.catch_warnings(record=True) as warned: - warnings.simplefilter('always') - job.results() - self.assertEqual(len(warned), 1) - self.assertIn('deprecated', str(warned[0])) - def test_result(self): from google.cloud.bigquery.query import QueryResults From 0afe56cda98742f5172c7dae6c3d9ed9631457af Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Jul 2017 15:29:38 -0700 Subject: [PATCH 48/62] Split polling future into its own module (#3662) --- bigquery/google/cloud/bigquery/job.py | 4 +- core/google/cloud/future/base.py | 149 --------------- core/google/cloud/future/operation.py | 4 +- core/google/cloud/future/polling.py | 169 ++++++++++++++++++ core/tests/unit/future/test_operation.py | 2 +- .../future/{test_base.py => test_polling.py} | 4 +- 6 files changed, 176 insertions(+), 156 deletions(-) create mode 100644 core/google/cloud/future/polling.py rename core/tests/unit/future/{test_base.py => test_polling.py} (97%) diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index 3e6a9f93418b..ef5353f9ff14 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -32,7 +32,7 @@ from google.cloud.bigquery._helpers import UDFResourcesProperty from google.cloud.bigquery._helpers import _EnumProperty from google.cloud.bigquery._helpers import _TypedProperty -import google.cloud.future.base +import google.cloud.future.polling _DONE_STATE = 'DONE' _STOPPED_REASON = 'stopped' @@ -141,7 +141,7 @@ class WriteDisposition(_EnumProperty): ALLOWED = (WRITE_APPEND, WRITE_TRUNCATE, WRITE_EMPTY) -class _AsyncJob(google.cloud.future.base.PollingFuture): +class _AsyncJob(google.cloud.future.polling.PollingFuture): """Base class for asynchronous jobs. :type name: str diff --git a/core/google/cloud/future/base.py b/core/google/cloud/future/base.py index aed1dfd80e5d..243913640d62 100644 --- a/core/google/cloud/future/base.py +++ b/core/google/cloud/future/base.py @@ -15,14 +15,8 @@ """Abstract and helper bases for Future implementations.""" import abc -import concurrent.futures -import functools -import operator import six -import tenacity - -from google.cloud.future import _helpers @six.add_metaclass(abc.ABCMeta) @@ -71,146 +65,3 @@ def set_result(self, result): @abc.abstractmethod def set_exception(self, exception): raise NotImplementedError() - - -class PollingFuture(Future): - """A Future that needs to poll some service to check its status. - - The :meth:`done` method should be implemented by subclasses. The polling - behavior will repeatedly call ``done`` until it returns True. - - .. note: Privacy here is intended to prevent the final class from - overexposing, not to prevent subclasses from accessing methods. - """ - def __init__(self): - super(PollingFuture, self).__init__() - self._result = None - self._exception = None - self._result_set = False - """bool: Set to True when the result has been set via set_result or - set_exception.""" - self._polling_thread = None - self._done_callbacks = [] - - @abc.abstractmethod - def done(self): - """Checks to see if the operation is complete. - - Returns: - bool: True if the operation is complete, False otherwise. 
- """ - # pylint: disable=redundant-returns-doc, missing-raises-doc - raise NotImplementedError() - - def running(self): - """True if the operation is currently running.""" - return not self.done() - - def _blocking_poll(self, timeout=None): - """Poll and wait for the Future to be resolved. - - Args: - timeout (int): How long to wait for the operation to complete. - If None, wait indefinitely. - """ - if self._result_set: - return - - retry_on = tenacity.retry_if_result( - functools.partial(operator.is_not, True)) - # Use exponential backoff with jitter. - wait_on = ( - tenacity.wait_exponential(multiplier=1, max=10) + - tenacity.wait_random(0, 1)) - - if timeout is None: - retry = tenacity.retry(retry=retry_on, wait=wait_on) - else: - retry = tenacity.retry( - retry=retry_on, - wait=wait_on, - stop=tenacity.stop_after_delay(timeout)) - - try: - retry(self.done)() - except tenacity.RetryError as exc: - six.raise_from( - concurrent.futures.TimeoutError( - 'Operation did not complete within the designated ' - 'timeout.'), - exc) - - def result(self, timeout=None): - """Get the result of the operation, blocking if necessary. - - Args: - timeout (int): How long to wait for the operation to complete. - If None, wait indefinitely. - - Returns: - google.protobuf.Message: The Operation's result. - - Raises: - google.gax.GaxError: If the operation errors or if the timeout is - reached before the operation completes. - """ - self._blocking_poll(timeout=timeout) - - if self._exception is not None: - # pylint: disable=raising-bad-type - # Pylint doesn't recognize that this is valid in this case. - raise self._exception - - return self._result - - def exception(self, timeout=None): - """Get the exception from the operation, blocking if necessary. - - Args: - timeout (int): How long to wait for the operation to complete. - If None, wait indefinitely. - - Returns: - Optional[google.gax.GaxError]: The operation's error. - """ - self._blocking_poll() - return self._exception - - def add_done_callback(self, fn): - """Add a callback to be executed when the operation is complete. - - If the operation is not already complete, this will start a helper - thread to poll for the status of the operation in the background. - - Args: - fn (Callable[Future]): The callback to execute when the operation - is complete. - """ - if self._result_set: - _helpers.safe_invoke_callback(fn, self) - return - - self._done_callbacks.append(fn) - - if self._polling_thread is None: - # The polling thread will exit on its own as soon as the operation - # is done. 
- self._polling_thread = _helpers.start_daemon_thread( - target=self._blocking_poll) - - def _invoke_callbacks(self, *args, **kwargs): - """Invoke all done callbacks.""" - for callback in self._done_callbacks: - _helpers.safe_invoke_callback(callback, *args, **kwargs) - - def set_result(self, result): - """Set the Future's result.""" - self._result = result - self._result_set = True - self._invoke_callbacks(self) - - def set_exception(self, exception): - """Set the Future's exception.""" - self._exception = exception - self._result_set = True - self._invoke_callbacks(self) diff --git a/core/google/cloud/future/operation.py b/core/google/cloud/future/operation.py index 8064e5c13e1f..21da738ca0ff 100644 --- a/core/google/cloud/future/operation.py +++ b/core/google/cloud/future/operation.py @@ -23,10 +23,10 @@ from google.cloud import _helpers from google.cloud import exceptions -from google.cloud.future import base +from google.cloud.future import polling -class Operation(base.PollingFuture): +class Operation(polling.PollingFuture): """A Future for interacting with a Google API Long-Running Operation. Args: diff --git a/core/google/cloud/future/polling.py b/core/google/cloud/future/polling.py new file mode 100644 index 000000000000..6b7ae4221f64 --- /dev/null +++ b/core/google/cloud/future/polling.py @@ -0,0 +1,169 @@ +# Copyright 2017, Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Abstract and helper bases for Future implementations.""" + +import abc +import concurrent.futures +import functools +import operator + +import six +import tenacity + +from google.cloud.future import _helpers +from google.cloud.future import base + + +class PollingFuture(base.Future): + """A Future that needs to poll some service to check its status. + + The :meth:`done` method should be implemented by subclasses. The polling + behavior will repeatedly call ``done`` until it returns True. + + .. note: Privacy here is intended to prevent the final class from + overexposing, not to prevent subclasses from accessing methods. + """ + def __init__(self): + super(PollingFuture, self).__init__() + self._result = None + self._exception = None + self._result_set = False + """bool: Set to True when the result has been set via set_result or + set_exception.""" + self._polling_thread = None + self._done_callbacks = [] + + @abc.abstractmethod + def done(self): + """Checks to see if the operation is complete. + + Returns: + bool: True if the operation is complete, False otherwise. + """ + # pylint: disable=redundant-returns-doc, missing-raises-doc + raise NotImplementedError() + + def running(self): + """True if the operation is currently running.""" + return not self.done() + + def _blocking_poll(self, timeout=None): + """Poll and wait for the Future to be resolved. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. 
+ """ + if self._result_set: + return + + retry_on = tenacity.retry_if_result( + functools.partial(operator.is_not, True)) + # Use exponential backoff with jitter. + wait_on = ( + tenacity.wait_exponential(multiplier=1, max=10) + + tenacity.wait_random(0, 1)) + + if timeout is None: + retry = tenacity.retry(retry=retry_on, wait=wait_on) + else: + retry = tenacity.retry( + retry=retry_on, + wait=wait_on, + stop=tenacity.stop_after_delay(timeout)) + + try: + retry(self.done)() + except tenacity.RetryError as exc: + six.raise_from( + concurrent.futures.TimeoutError( + 'Operation did not complete within the designated ' + 'timeout.'), + exc) + + def result(self, timeout=None): + """Get the result of the operation, blocking if necessary. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + + Returns: + google.protobuf.Message: The Operation's result. + + Raises: + google.gax.GaxError: If the operation errors or if the timeout is + reached before the operation completes. + """ + self._blocking_poll(timeout=timeout) + + if self._exception is not None: + # pylint: disable=raising-bad-type + # Pylint doesn't recognize that this is valid in this case. + raise self._exception + + return self._result + + def exception(self, timeout=None): + """Get the exception from the operation, blocking if necessary. + + Args: + timeout (int): How long to wait for the operation to complete. + If None, wait indefinitely. + + Returns: + Optional[google.gax.GaxError]: The operation's error. + """ + self._blocking_poll() + return self._exception + + def add_done_callback(self, fn): + """Add a callback to be executed when the operation is complete. + + If the operation is not already complete, this will start a helper + thread to poll for the status of the operation in the background. + + Args: + fn (Callable[Future]): The callback to execute when the operation + is complete. + """ + if self._result_set: + _helpers.safe_invoke_callback(fn, self) + return + + self._done_callbacks.append(fn) + + if self._polling_thread is None: + # The polling thread will exit on its own as soon as the operation + # is done. 
+ self._polling_thread = _helpers.start_daemon_thread( + target=self._blocking_poll) + + def _invoke_callbacks(self, *args, **kwargs): + """Invoke all done callbacks.""" + for callback in self._done_callbacks: + _helpers.safe_invoke_callback(callback, *args, **kwargs) + + def set_result(self, result): + """Set the Future's result.""" + self._result = result + self._result_set = True + self._invoke_callbacks(self) + + def set_exception(self, exception): + """Set the Future's exception.""" + self._exception = exception + self._result_set = True + self._invoke_callbacks(self) diff --git a/core/tests/unit/future/test_operation.py b/core/tests/unit/future/test_operation.py index 0e29aa687ee6..2d281694001a 100644 --- a/core/tests/unit/future/test_operation.py +++ b/core/tests/unit/future/test_operation.py @@ -61,7 +61,7 @@ def make_operation_future(client_operations_responses=None): def test_constructor(): - future, refresh, cancel = make_operation_future() + future, refresh, _ = make_operation_future() assert future.operation == refresh.responses[0] assert future.operation.done is False diff --git a/core/tests/unit/future/test_base.py b/core/tests/unit/future/test_polling.py similarity index 97% rename from core/tests/unit/future/test_base.py rename to core/tests/unit/future/test_polling.py index 69a0348e68d9..c8fde1c20385 100644 --- a/core/tests/unit/future/test_base.py +++ b/core/tests/unit/future/test_polling.py @@ -19,10 +19,10 @@ import mock import pytest -from google.cloud.future import base +from google.cloud.future import polling -class PollingFutureImpl(base.PollingFuture): +class PollingFutureImpl(polling.PollingFuture): def done(self): return False From 9deef80069f53bc6f9ba2c647d9715378ed56cdd Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Jul 2017 16:09:58 -0700 Subject: [PATCH 49/62] Re-enable flake8 for core package and tests (#3664) --- core/.flake8 | 3 +++ core/google/cloud/_helpers.py | 10 +++++----- core/google/cloud/_http.py | 2 +- core/google/cloud/client.py | 4 ++-- core/google/cloud/credentials.py | 6 +++--- core/google/cloud/exceptions.py | 5 +++-- core/google/cloud/future/operation.py | 7 +++---- core/nox.py | 2 +- core/tests/unit/test_credentials.py | 11 +++++------ core/tests/unit/test_iam.py | 1 - 10 files changed, 26 insertions(+), 25 deletions(-) diff --git a/core/.flake8 b/core/.flake8 index 7f4ddb8072b0..3db9b737d6bc 100644 --- a/core/.flake8 +++ b/core/.flake8 @@ -1,5 +1,8 @@ [flake8] import-order-style=google +# Note: this forces all google imports to be in the third group. See +# https://github.com/PyCQA/flake8-import-order/issues/111 +application-import-names=google exclude = __pycache__, .git, diff --git a/core/google/cloud/_helpers.py b/core/google/cloud/_helpers.py index 62bbccf74b15..fdb22ecdf09c 100644 --- a/core/google/cloud/_helpers.py +++ b/core/google/cloud/_helpers.py @@ -25,10 +25,14 @@ import re from threading import local as Local +import google_auth_httplib2 +import httplib2 +import six +from six.moves import http_client + import google.auth from google.protobuf import duration_pb2 from google.protobuf import timestamp_pb2 -import google_auth_httplib2 try: import grpc @@ -36,10 +40,6 @@ except ImportError: # pragma: NO COVER grpc = None -import httplib2 -import six -from six.moves import http_client - _NOW = datetime.datetime.utcnow # To be replaced by tests. 
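# The regrouping above is what the ``google`` import-order style (with
# ``application-import-names=google``) enforces: standard library first,
# then third-party packages, then everything under ``google``. A sketch
# with illustrative module names:
from __future__ import absolute_import

import datetime
import os

import six

from google.cloud import _helpers
from google.protobuf import timestamp_pb2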
_RFC3339_MICROS = '%Y-%m-%dT%H:%M:%S.%fZ' diff --git a/core/google/cloud/_http.py b/core/google/cloud/_http.py index ada60b4fb2c3..186d6216e7eb 100644 --- a/core/google/cloud/_http.py +++ b/core/google/cloud/_http.py @@ -16,8 +16,8 @@ import json import platform -from pkg_resources import get_distribution +from pkg_resources import get_distribution import six from six.moves.urllib.parse import urlencode diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py index 5fa7f7ef95a2..5906ab5ed108 100644 --- a/core/google/cloud/client.py +++ b/core/google/cloud/client.py @@ -18,13 +18,13 @@ import json from pickle import PicklingError -import google.auth.credentials -from google.oauth2 import service_account import google_auth_httplib2 import six +import google.auth.credentials from google.cloud._helpers import _determine_default_project from google.cloud.credentials import get_credentials +from google.oauth2 import service_account _GOOGLE_AUTH_CREDENTIALS_HELP = ( diff --git a/core/google/cloud/credentials.py b/core/google/cloud/credentials.py index e5fe30245ea5..29c4a5d310f4 100644 --- a/core/google/cloud/credentials.py +++ b/core/google/cloud/credentials.py @@ -16,15 +16,15 @@ import base64 import datetime + import six from six.moves.urllib.parse import urlencode import google.auth import google.auth.credentials - -from google.cloud._helpers import UTC -from google.cloud._helpers import _NOW from google.cloud._helpers import _microseconds_from_datetime +from google.cloud._helpers import _NOW +from google.cloud._helpers import UTC def get_credentials(): diff --git a/core/google/cloud/exceptions.py b/core/google/cloud/exceptions.py index 32080de7ff50..e911980c6328 100644 --- a/core/google/cloud/exceptions.py +++ b/core/google/cloud/exceptions.py @@ -22,17 +22,18 @@ import copy import json + import six from google.cloud._helpers import _to_bytes -_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module - try: from grpc._channel import _Rendezvous except ImportError: # pragma: NO COVER _Rendezvous = None +_HTTP_CODE_TO_EXCEPTION = {} # populated at end of module + # pylint: disable=invalid-name GrpcRendezvous = _Rendezvous diff --git a/core/google/cloud/future/operation.py b/core/google/cloud/future/operation.py index 21da738ca0ff..ec430cd9c55b 100644 --- a/core/google/cloud/future/operation.py +++ b/core/google/cloud/future/operation.py @@ -17,13 +17,12 @@ import functools import threading -from google.longrunning import operations_pb2 -from google.protobuf import json_format -from google.rpc import code_pb2 - from google.cloud import _helpers from google.cloud import exceptions from google.cloud.future import polling +from google.longrunning import operations_pb2 +from google.protobuf import json_format +from google.rpc import code_pb2 class Operation(polling.PollingFuture): diff --git a/core/nox.py b/core/nox.py index 8f025cce8b61..48b55332283e 100644 --- a/core/nox.py +++ b/core/nox.py @@ -53,7 +53,7 @@ def lint(session): session.install( 'flake8', 'flake8-import-order', 'pylint', 'gcp-devrel-py-tools') session.install('.') - session.run('flake8', 'google/cloud/core') + session.run('flake8', 'google', 'tests') session.run( 'gcp-devrel-py-tools', 'run-pylint', '--config', 'pylint.config.py', diff --git a/core/tests/unit/test_credentials.py b/core/tests/unit/test_credentials.py index 53370a061494..aaffa907dda1 100644 --- a/core/tests/unit/test_credentials.py +++ b/core/tests/unit/test_credentials.py @@ -15,6 +15,7 @@ import unittest import mock +import six class 
Test_get_credentials(unittest.TestCase): @@ -169,12 +170,10 @@ def test_w_int(self): self.assertEqual(self._call_fut(123), 123) def test_w_long(self): - try: - long - except NameError: # pragma: NO COVER Py3K - pass - else: - self.assertEqual(self._call_fut(long(123)), 123) + if six.PY3: + raise unittest.SkipTest('No long on Python 3') + + self.assertEqual(self._call_fut(long(123)), 123) # noqa: F821 def test_w_naive_datetime(self): import datetime diff --git a/core/tests/unit/test_iam.py b/core/tests/unit/test_iam.py index d076edd6eba9..4a17c61ce173 100644 --- a/core/tests/unit/test_iam.py +++ b/core/tests/unit/test_iam.py @@ -200,7 +200,6 @@ def test_from_api_repr_complete(self): {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, ], } - empty = frozenset() klass = self._get_target_class() policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'DEADBEEF') From e14094c3790b685aed0d9caa52d3c394c4d394d6 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 24 Jul 2017 20:44:03 -0400 Subject: [PATCH 50/62] Add systest for round-trip of NULL INT64. (#3665) Include NULL values in ARRAY. --- spanner/tests/system/test_system.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/spanner/tests/system/test_system.py b/spanner/tests/system/test_system.py index e6d73f977e94..b2f83ce9fa1d 100644 --- a/spanner/tests/system/test_system.py +++ b/spanner/tests/system/test_system.py @@ -371,10 +371,11 @@ class TestSessionAPI(unittest.TestCase, _TestData): BYTES_1 = b'Ymlu' BYTES_2 = b'Ym9vdHM=' ALL_TYPES_ROWDATA = ( + ([], False, None, None, 0.0, None, None, None), ([1], True, BYTES_1, SOME_DATE, 0.0, 19, u'dog', SOME_TIME), ([5, 10], True, BYTES_1, None, 1.25, 99, u'cat', None), ([], False, BYTES_2, None, float('inf'), 107, u'frog', None), - ([], False, None, None, float('-inf'), 207, None, None), + ([3, None, 9], False, None, None, float('-inf'), 207, None, None), ([], False, None, None, float('nan'), 1207, None, None), ([], False, None, None, OTHER_NAN, 2000, None, NANO_TIME), ) @@ -903,7 +904,7 @@ def test_execute_sql_w_query_param(self): params={'lower': 0.0, 'upper': 1.0}, param_types={ 'lower': Type(code=FLOAT64), 'upper': Type(code=FLOAT64)}, - expected=[(19,)], + expected=[(None,), (19,)], ) # Find -inf From b32509f7dbd4d87e15c8877667cc222fa344b9a2 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Jul 2017 10:31:53 -0700 Subject: [PATCH 51/62] Adding getter/setter note for Blob.metadata property. (#3647) Fixes #3645. --- storage/google/cloud/storage/blob.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py index d03d1364cf40..8d6ec2619ea1 100644 --- a/storage/google/cloud/storage/blob.py +++ b/storage/google/cloud/storage/blob.py @@ -1400,6 +1400,11 @@ def metadata(self): See https://cloud.google.com/storage/docs/json_api/v1/objects + :setter: Update arbitrary/application specific metadata for the + object. + :getter: Retrieve arbitrary/application specific metadata for + the object. + :rtype: dict or ``NoneType`` :returns: The metadata associated with the blob or ``None`` if the property is not set locally. 
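In practice the property reads and writes the object's custom metadata. A
short sketch, with made-up bucket and blob names:

from google.cloud import storage

client = storage.Client()
bucket = client.get_bucket('my-bucket')
blob = bucket.get_blob('path/to/blob.txt')

# Getter: a dict of application metadata, or None if never set.
print(blob.metadata)

# Setter: stage new metadata locally, then persist it.
blob.metadata = {'color': 'pink'}
blob.patch()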
From cf4ab44da1315acd6d9ec34025c677813d5d685d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isra=C3=ABl=20Hall=C3=A9?= Date: Tue, 25 Jul 2017 14:10:47 -0400 Subject: [PATCH 52/62] Correctly url-encode list parameters (#3657) --- core/google/cloud/_http.py | 2 +- core/tests/unit/test__http.py | 24 ++++++++++++++++-------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/core/google/cloud/_http.py b/core/google/cloud/_http.py index 186d6216e7eb..b7c17ca91d6d 100644 --- a/core/google/cloud/_http.py +++ b/core/google/cloud/_http.py @@ -135,7 +135,7 @@ def build_api_url(cls, path, query_params=None, query_params = query_params or {} if query_params: - url += '?' + urlencode(query_params) + url += '?' + urlencode(query_params, doseq=True) return url diff --git a/core/tests/unit/test__http.py b/core/tests/unit/test__http.py index 1226042b5859..22df11566811 100644 --- a/core/tests/unit/test__http.py +++ b/core/tests/unit/test__http.py @@ -94,12 +94,15 @@ def test_build_api_url_no_extra_query_params(self): self.assertEqual(conn.build_api_url('/foo'), URI) def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlsplit client = object() conn = self._make_mock_one(client) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) + uri = conn.build_api_url('/foo', { + 'bar': 'baz', + 'qux': ['quux', 'corge'] + }) scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) @@ -111,8 +114,9 @@ def test_build_api_url_w_extra_query_params(self): 'foo', ]) self.assertEqual(path, PATH) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') + parms = dict(parse_qs(qs)) + self.assertEqual(parms['bar'], ['baz']) + self.assertEqual(parms['qux'], ['quux', 'corge']) def test__make_request_no_data_no_content_type_no_headers(self): http = _Http( @@ -222,7 +226,7 @@ def test_api_request_wo_json_expected(self): b'CONTENT') def test_api_request_w_query_params(self): - from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import parse_qs from six.moves.urllib.parse import urlsplit http = _Http( @@ -231,7 +235,10 @@ def test_api_request_w_query_params(self): ) client = mock.Mock(_http=http, spec=['_http']) conn = self._make_mock_one(client) - self.assertEqual(conn.api_request('GET', '/', {'foo': 'bar'}), {}) + self.assertEqual(conn.api_request('GET', '/', { + 'foo': 'bar', + 'baz': ['qux', 'quux'] + }), {}) self.assertEqual(http._called_with['method'], 'GET') uri = http._called_with['uri'] scheme, netloc, path, qs, _ = urlsplit(uri) @@ -244,8 +251,9 @@ def test_api_request_w_query_params(self): '', ]) self.assertEqual(path, PATH) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['foo'], 'bar') + parms = dict(parse_qs(qs)) + self.assertEqual(parms['foo'], ['bar']) + self.assertEqual(parms['baz'], ['qux', 'quux']) self.assertIsNone(http._called_with['body']) expected_headers = { 'Accept-Encoding': 'gzip', From 73648e2f116e0605a122432ce7dc384a54bfe098 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Jul 2017 13:10:24 -0700 Subject: [PATCH 53/62] Moving bytes signing helpers from `core` to `storage`. 
(#3668) --- core/google/cloud/credentials.py | 173 ------------------ core/tests/unit/test_credentials.py | 199 -------------------- storage/google/cloud/storage/_signing.py | 189 +++++++++++++++++++ storage/google/cloud/storage/blob.py | 2 +- storage/nox.py | 12 +- storage/tests/unit/test__signing.py | 222 +++++++++++++++++++++++ 6 files changed, 421 insertions(+), 376 deletions(-) create mode 100644 storage/google/cloud/storage/_signing.py create mode 100644 storage/tests/unit/test__signing.py diff --git a/core/google/cloud/credentials.py b/core/google/cloud/credentials.py index 29c4a5d310f4..b434cac2f1e7 100644 --- a/core/google/cloud/credentials.py +++ b/core/google/cloud/credentials.py @@ -14,17 +14,7 @@ """A simple wrapper around the OAuth2 credentials library.""" -import base64 -import datetime - -import six -from six.moves.urllib.parse import urlencode - import google.auth -import google.auth.credentials -from google.cloud._helpers import _microseconds_from_datetime -from google.cloud._helpers import _NOW -from google.cloud._helpers import UTC def get_credentials(): @@ -38,166 +28,3 @@ def get_credentials(): """ credentials, _ = google.auth.default() return credentials - - -def _get_signed_query_params(credentials, expiration, string_to_sign): - """Gets query parameters for creating a signed URL. - - :type credentials: :class:`google.auth.credentials.Signer` - :param credentials: The credentials used to create a private key - for signing text. - - :type expiration: int or long - :param expiration: When the signed URL should expire. - - :type string_to_sign: str - :param string_to_sign: The string to be signed by the credentials. - - :raises AttributeError: If :meth: sign_blob is unavailable. - - :rtype: dict - :returns: Query parameters matching the signing credentials with a - signed payload. - """ - if not isinstance(credentials, google.auth.credentials.Signing): - auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' - 'core/auth.html?highlight=authentication#setting-up-' - 'a-service-account') - raise AttributeError('you need a private key to sign credentials.' - 'the credentials you are currently using %s ' - 'just contains a token. see %s for more ' - 'details.' % (type(credentials), auth_uri)) - - signature_bytes = credentials.sign_bytes(string_to_sign) - signature = base64.b64encode(signature_bytes) - service_account_name = credentials.signer_email - return { - 'GoogleAccessId': service_account_name, - 'Expires': str(expiration), - 'Signature': signature, - } - - -def _get_expiration_seconds(expiration): - """Convert 'expiration' to a number of seconds in the future. - - :type expiration: int, long, datetime.datetime, datetime.timedelta - :param expiration: When the signed URL should expire. - - :raises TypeError: When expiration is not an integer. - - :rtype: int - :returns: a timestamp as an absolute number of seconds. - """ - # If it's a timedelta, add it to `now` in UTC. - if isinstance(expiration, datetime.timedelta): - now = _NOW().replace(tzinfo=UTC) - expiration = now + expiration - - # If it's a datetime, convert to a timestamp. - if isinstance(expiration, datetime.datetime): - micros = _microseconds_from_datetime(expiration) - expiration = micros // 10**6 - - if not isinstance(expiration, six.integer_types): - raise TypeError('Expected an integer timestamp, datetime, or ' - 'timedelta. 
Got %s' % type(expiration)) - return expiration - - -def generate_signed_url(credentials, resource, expiration, - api_access_endpoint='', - method='GET', content_md5=None, - content_type=None, response_type=None, - response_disposition=None, generation=None): - """Generate signed URL to provide query-string auth'n to a resource. - - .. note:: - - Assumes ``credentials`` implements the - :class:`google.auth.credentials.Signing` interface. Also assumes - ``credentials`` has a ``service_account_email`` property which - identifies the credentials. - - .. note:: - - If you are on Google Compute Engine, you can't generate a signed URL. - Follow `Issue 922`_ for updates on this. If you'd like to be able to - generate a signed URL from GCE, you can use a standard service account - from a JSON file rather than a GCE service account. - - See headers `reference`_ for more details on optional arguments. - - .. _Issue 922: https://github.com/GoogleCloudPlatform/\ - google-cloud-python/issues/922 - .. _reference: https://cloud.google.com/storage/docs/reference-headers - - :type credentials: :class:`google.auth.credentials.Signing` - :param credentials: Credentials object with an associated private key to - sign text. - - :type resource: str - :param resource: A pointer to a specific resource - (typically, ``/bucket-name/path/to/blob.txt``). - - :type expiration: :class:`int`, :class:`long`, :class:`datetime.datetime`, - :class:`datetime.timedelta` - :param expiration: When the signed URL should expire. - - :type api_access_endpoint: str - :param api_access_endpoint: Optional URI base. Defaults to empty string. - - :type method: str - :param method: The HTTP verb that will be used when requesting the URL. - Defaults to ``'GET'``. - - :type content_md5: str - :param content_md5: (Optional) The MD5 hash of the object referenced by - ``resource``. - - :type content_type: str - :param content_type: (Optional) The content type of the object referenced - by ``resource``. - - :type response_type: str - :param response_type: (Optional) Content type of responses to requests for - the signed URL. Used to over-ride the content type of - the underlying resource. - - :type response_disposition: str - :param response_disposition: (Optional) Content disposition of responses to - requests for the signed URL. - - :type generation: str - :param generation: (Optional) A value that indicates which generation of - the resource to fetch. - - :rtype: str - :returns: A signed URL you can use to access the resource - until expiration. - """ - expiration = _get_expiration_seconds(expiration) - - # Generate the string to sign. - string_to_sign = '\n'.join([ - method, - content_md5 or '', - content_type or '', - str(expiration), - resource]) - - # Set the right query parameters. - query_params = _get_signed_query_params(credentials, - expiration, - string_to_sign) - if response_type is not None: - query_params['response-content-type'] = response_type - if response_disposition is not None: - query_params['response-content-disposition'] = response_disposition - if generation is not None: - query_params['generation'] = generation - - # Return the built URL. 
- return '{endpoint}{resource}?{querystring}'.format( - endpoint=api_access_endpoint, resource=resource, - querystring=urlencode(query_params)) diff --git a/core/tests/unit/test_credentials.py b/core/tests/unit/test_credentials.py index aaffa907dda1..3b313c1dc1d6 100644 --- a/core/tests/unit/test_credentials.py +++ b/core/tests/unit/test_credentials.py @@ -15,7 +15,6 @@ import unittest import mock -import six class Test_get_credentials(unittest.TestCase): @@ -33,201 +32,3 @@ def test_it(self): self.assertIs(found, mock.sentinel.credentials) default.assert_called_once_with() - - -class Test_generate_signed_url(unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.credentials import generate_signed_url - - return generate_signed_url(*args, **kwargs) - - def _generate_helper(self, response_type=None, response_disposition=None, - generation=None): - import base64 - from six.moves.urllib.parse import parse_qs - from six.moves.urllib.parse import urlsplit - import google.auth.credentials - from google.cloud._testing import _Monkey - from google.cloud import credentials as MUT - - ENDPOINT = 'http://api.example.com' - RESOURCE = '/name/path' - SIGNED = base64.b64encode(b'DEADBEEF') - CREDENTIALS = mock.Mock(spec=google.auth.credentials.Signing) - CREDENTIALS.signer_email = 'service@example.com' - - def _get_signed_query_params(*args): - credentials, expiration = args[:2] - return { - 'GoogleAccessId': credentials.signer_email, - 'Expires': str(expiration), - 'Signature': SIGNED, - } - - with _Monkey(MUT, _get_signed_query_params=_get_signed_query_params): - url = self._call_fut(CREDENTIALS, RESOURCE, 1000, - api_access_endpoint=ENDPOINT, - response_type=response_type, - response_disposition=response_disposition, - generation=generation) - - scheme, netloc, path, qs, frag = urlsplit(url) - self.assertEqual(scheme, 'http') - self.assertEqual(netloc, 'api.example.com') - self.assertEqual(path, RESOURCE) - params = parse_qs(qs) - # In Py3k, parse_qs gives us text values: - self.assertEqual(params.pop('Signature'), [SIGNED.decode('ascii')]) - self.assertEqual(params.pop('Expires'), ['1000']) - self.assertEqual(params.pop('GoogleAccessId'), - [CREDENTIALS.signer_email]) - if response_type is not None: - self.assertEqual(params.pop('response-content-type'), - [response_type]) - if response_disposition is not None: - self.assertEqual(params.pop('response-content-disposition'), - [response_disposition]) - if generation is not None: - self.assertEqual(params.pop('generation'), [generation]) - # Make sure we have checked them all. 
- self.assertEqual(len(params), 0) - self.assertEqual(frag, '') - - def test_w_expiration_int(self): - self._generate_helper() - - def test_w_custom_fields(self): - response_type = 'text/plain' - response_disposition = 'attachment; filename=blob.png' - generation = '123' - self._generate_helper(response_type=response_type, - response_disposition=response_disposition, - generation=generation) - - -class Test_generate_signed_url_exception(unittest.TestCase): - def test_with_google_credentials(self): - import time - import google.auth.credentials - from google.cloud.credentials import generate_signed_url - - RESOURCE = '/name/path' - - credentials = mock.Mock(spec=google.auth.credentials.Credentials) - expiration = int(time.time() + 5) - self.assertRaises(AttributeError, generate_signed_url, credentials, - resource=RESOURCE, expiration=expiration) - - -class Test__get_signed_query_params(unittest.TestCase): - - def _call_fut(self, credentials, expiration, string_to_sign): - from google.cloud.credentials import _get_signed_query_params - - return _get_signed_query_params(credentials, expiration, - string_to_sign) - - def test_it(self): - import base64 - import google.auth.credentials - - SIG_BYTES = b'DEADBEEF' - ACCOUNT_NAME = mock.sentinel.service_account_email - CREDENTIALS = mock.Mock(spec=google.auth.credentials.Signing) - CREDENTIALS.signer_email = ACCOUNT_NAME - CREDENTIALS.sign_bytes.return_value = SIG_BYTES - EXPIRATION = 100 - STRING_TO_SIGN = 'dummy_signature' - result = self._call_fut(CREDENTIALS, EXPIRATION, - STRING_TO_SIGN) - - self.assertEqual(result, { - 'GoogleAccessId': ACCOUNT_NAME, - 'Expires': str(EXPIRATION), - 'Signature': base64.b64encode(b'DEADBEEF'), - }) - CREDENTIALS.sign_bytes.assert_called_once_with(STRING_TO_SIGN) - - -class Test__get_expiration_seconds(unittest.TestCase): - - def _call_fut(self, expiration): - from google.cloud.credentials import _get_expiration_seconds - - return _get_expiration_seconds(expiration) - - def _utc_seconds(self, when): - import calendar - - return int(calendar.timegm(when.timetuple())) - - def test_w_invalid(self): - self.assertRaises(TypeError, self._call_fut, object()) - self.assertRaises(TypeError, self._call_fut, None) - - def test_w_int(self): - self.assertEqual(self._call_fut(123), 123) - - def test_w_long(self): - if six.PY3: - raise unittest.SkipTest('No long on Python 3') - - self.assertEqual(self._call_fut(long(123)), 123) # noqa: F821 - - def test_w_naive_datetime(self): - import datetime - - expiration_no_tz = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(expiration_no_tz) - self.assertEqual(self._call_fut(expiration_no_tz), utc_seconds) - - def test_w_utc_datetime(self): - import datetime - from google.cloud._helpers import UTC - - expiration_utc = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, UTC) - utc_seconds = self._utc_seconds(expiration_utc) - self.assertEqual(self._call_fut(expiration_utc), utc_seconds) - - def test_w_other_zone_datetime(self): - import datetime - from google.cloud._helpers import _UTC - - class CET(_UTC): - _tzname = 'CET' - _utcoffset = datetime.timedelta(hours=1) - - zone = CET() - expiration_other = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, zone) - utc_seconds = self._utc_seconds(expiration_other) - cet_seconds = utc_seconds - (60 * 60) # CET one hour earlier than UTC - self.assertEqual(self._call_fut(expiration_other), cet_seconds) - - def test_w_timedelta_seconds(self): - import datetime - from google.cloud._testing import _Monkey - from google.cloud import 
credentials as MUT - - dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(dummy_utcnow) - expiration_as_delta = datetime.timedelta(seconds=10) - - with _Monkey(MUT, _NOW=lambda: dummy_utcnow): - result = self._call_fut(expiration_as_delta) - - self.assertEqual(result, utc_seconds + 10) - - def test_w_timedelta_days(self): - import datetime - from google.cloud._testing import _Monkey - from google.cloud import credentials as MUT - - dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) - utc_seconds = self._utc_seconds(dummy_utcnow) - expiration_as_delta = datetime.timedelta(days=1) - - with _Monkey(MUT, _NOW=lambda: dummy_utcnow): - result = self._call_fut(expiration_as_delta) - - self.assertEqual(result, utc_seconds + 86400) diff --git a/storage/google/cloud/storage/_signing.py b/storage/google/cloud/storage/_signing.py new file mode 100644 index 000000000000..58e62ac1502d --- /dev/null +++ b/storage/google/cloud/storage/_signing.py @@ -0,0 +1,189 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import base64 +import datetime + +import six + +import google.auth.credentials +from google.cloud import _helpers + + +NOW = datetime.datetime.utcnow # To be replaced by tests. + + +def get_signed_query_params(credentials, expiration, string_to_sign): + """Gets query parameters for creating a signed URL. + + :type credentials: :class:`google.auth.credentials.Signer` + :param credentials: The credentials used to create a private key + for signing text. + + :type expiration: int or long + :param expiration: When the signed URL should expire. + + :type string_to_sign: str + :param string_to_sign: The string to be signed by the credentials. + + :raises AttributeError: If :meth: sign_blob is unavailable. + + :rtype: dict + :returns: Query parameters matching the signing credentials with a + signed payload. + """ + if not isinstance(credentials, google.auth.credentials.Signing): + auth_uri = ('https://google-cloud-python.readthedocs.io/en/latest/' + 'core/auth.html?highlight=authentication#setting-up-' + 'a-service-account') + raise AttributeError('you need a private key to sign credentials.' + 'the credentials you are currently using %s ' + 'just contains a token. see %s for more ' + 'details.' % (type(credentials), auth_uri)) + + signature_bytes = credentials.sign_bytes(string_to_sign) + signature = base64.b64encode(signature_bytes) + service_account_name = credentials.signer_email + return { + 'GoogleAccessId': service_account_name, + 'Expires': str(expiration), + 'Signature': signature, + } + + +def get_expiration_seconds(expiration): + """Convert 'expiration' to a number of seconds in the future. + + :type expiration: int, long, datetime.datetime, datetime.timedelta + :param expiration: When the signed URL should expire. + + :raises TypeError: When expiration is not an integer. + + :rtype: int + :returns: a timestamp as an absolute number of seconds. + """ + # If it's a timedelta, add it to `now` in UTC. 
+ if isinstance(expiration, datetime.timedelta): + now = NOW().replace(tzinfo=_helpers.UTC) + expiration = now + expiration + + # If it's a datetime, convert to a timestamp. + if isinstance(expiration, datetime.datetime): + micros = _helpers._microseconds_from_datetime(expiration) + expiration = micros // 10**6 + + if not isinstance(expiration, six.integer_types): + raise TypeError('Expected an integer timestamp, datetime, or ' + 'timedelta. Got %s' % type(expiration)) + return expiration + + +def generate_signed_url(credentials, resource, expiration, + api_access_endpoint='', + method='GET', content_md5=None, + content_type=None, response_type=None, + response_disposition=None, generation=None): + """Generate signed URL to provide query-string auth'n to a resource. + + .. note:: + + Assumes ``credentials`` implements the + :class:`google.auth.credentials.Signing` interface. Also assumes + ``credentials`` has a ``service_account_email`` property which + identifies the credentials. + + .. note:: + + If you are on Google Compute Engine, you can't generate a signed URL. + Follow `Issue 922`_ for updates on this. If you'd like to be able to + generate a signed URL from GCE, you can use a standard service account + from a JSON file rather than a GCE service account. + + See headers `reference`_ for more details on optional arguments. + + .. _Issue 922: https://github.com/GoogleCloudPlatform/\ + google-cloud-python/issues/922 + .. _reference: https://cloud.google.com/storage/docs/reference-headers + + :type credentials: :class:`google.auth.credentials.Signing` + :param credentials: Credentials object with an associated private key to + sign text. + + :type resource: str + :param resource: A pointer to a specific resource + (typically, ``/bucket-name/path/to/blob.txt``). + + :type expiration: :class:`int`, :class:`long`, :class:`datetime.datetime`, + :class:`datetime.timedelta` + :param expiration: When the signed URL should expire. + + :type api_access_endpoint: str + :param api_access_endpoint: Optional URI base. Defaults to empty string. + + :type method: str + :param method: The HTTP verb that will be used when requesting the URL. + Defaults to ``'GET'``. + + :type content_md5: str + :param content_md5: (Optional) The MD5 hash of the object referenced by + ``resource``. + + :type content_type: str + :param content_type: (Optional) The content type of the object referenced + by ``resource``. + + :type response_type: str + :param response_type: (Optional) Content type of responses to requests for + the signed URL. Used to over-ride the content type of + the underlying resource. + + :type response_disposition: str + :param response_disposition: (Optional) Content disposition of responses to + requests for the signed URL. + + :type generation: str + :param generation: (Optional) A value that indicates which generation of + the resource to fetch. + + :rtype: str + :returns: A signed URL you can use to access the resource + until expiration. + """ + expiration = get_expiration_seconds(expiration) + + # Generate the string to sign. + string_to_sign = '\n'.join([ + method, + content_md5 or '', + content_type or '', + str(expiration), + resource, + ]) + + # Set the right query parameters. 
+ query_params = get_signed_query_params( + credentials, expiration, string_to_sign) + + if response_type is not None: + query_params['response-content-type'] = response_type + if response_disposition is not None: + query_params['response-content-disposition'] = response_disposition + if generation is not None: + query_params['generation'] = generation + + # Return the built URL. + return '{endpoint}{resource}?{querystring}'.format( + endpoint=api_access_endpoint, resource=resource, + querystring=six.moves.urllib.parse.urlencode(query_params)) diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py index 8d6ec2619ea1..dfefc3c1a4fa 100644 --- a/storage/google/cloud/storage/blob.py +++ b/storage/google/cloud/storage/blob.py @@ -47,12 +47,12 @@ from google.cloud._helpers import _rfc3339_to_datetime from google.cloud._helpers import _to_bytes from google.cloud._helpers import _bytes_to_unicode -from google.cloud.credentials import generate_signed_url from google.cloud.exceptions import NotFound from google.cloud.exceptions import make_exception from google.cloud.iam import Policy from google.cloud.storage._helpers import _PropertyMixin from google.cloud.storage._helpers import _scalar_property +from google.cloud.storage._signing import generate_signed_url from google.cloud.storage.acl import ObjectACL diff --git a/storage/nox.py b/storage/nox.py index 3de8efed3fd9..18ccf81aaff2 100644 --- a/storage/nox.py +++ b/storage/nox.py @@ -39,10 +39,16 @@ def unit_tests(session, python_version): # Run py.test against the unit tests. session.run( - 'py.test', '--quiet', - '--cov=google.cloud.storage', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + 'py.test', + '--quiet', + '--cov=google.cloud.storage', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', 'tests/unit', + *session.posargs ) diff --git a/storage/tests/unit/test__signing.py b/storage/tests/unit/test__signing.py new file mode 100644 index 000000000000..1e2aabb9d25e --- /dev/null +++ b/storage/tests/unit/test__signing.py @@ -0,0 +1,222 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
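# End to end, the relocated helper is used roughly like this (a sketch:
# the bucket path is made up, and ``credentials`` is assumed to be
# service-account credentials implementing google.auth's Signing
# interface):
import datetime

from google.cloud.storage._signing import generate_signed_url

url = generate_signed_url(
    credentials,
    resource='/my-bucket/path/to/blob.txt',
    expiration=datetime.timedelta(hours=1),
    api_access_endpoint='https://storage.googleapis.com',
)
# The returned URL carries GoogleAccessId, Expires, and Signature
# query parameters.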
+ +import base64 +import calendar +import datetime +import time +import unittest + +import mock +import six +from six.moves import urllib_parse + + +class Test_get_expiration_seconds(unittest.TestCase): + + @staticmethod + def _call_fut(expiration): + from google.cloud.storage._signing import get_expiration_seconds + + return get_expiration_seconds(expiration) + + @staticmethod + def _utc_seconds(when): + return int(calendar.timegm(when.timetuple())) + + def test_w_invalid(self): + self.assertRaises(TypeError, self._call_fut, object()) + self.assertRaises(TypeError, self._call_fut, None) + + def test_w_int(self): + self.assertEqual(self._call_fut(123), 123) + + def test_w_long(self): + if six.PY3: + raise unittest.SkipTest('No long on Python 3') + + self.assertEqual(self._call_fut(long(123)), 123) # noqa: F821 + + def test_w_naive_datetime(self): + expiration_no_tz = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(expiration_no_tz) + self.assertEqual(self._call_fut(expiration_no_tz), utc_seconds) + + def test_w_utc_datetime(self): + from google.cloud._helpers import UTC + + expiration_utc = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, UTC) + utc_seconds = self._utc_seconds(expiration_utc) + self.assertEqual(self._call_fut(expiration_utc), utc_seconds) + + def test_w_other_zone_datetime(self): + from google.cloud._helpers import _UTC + + class CET(_UTC): + _tzname = 'CET' + _utcoffset = datetime.timedelta(hours=1) + + zone = CET() + expiration_other = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, zone) + utc_seconds = self._utc_seconds(expiration_other) + cet_seconds = utc_seconds - (60 * 60) # CET one hour earlier than UTC + self.assertEqual(self._call_fut(expiration_other), cet_seconds) + + def test_w_timedelta_seconds(self): + dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(dummy_utcnow) + expiration_as_delta = datetime.timedelta(seconds=10) + + patch = mock.patch( + 'google.cloud.storage._signing.NOW', + return_value=dummy_utcnow) + with patch as utcnow: + result = self._call_fut(expiration_as_delta) + + self.assertEqual(result, utc_seconds + 10) + utcnow.assert_called_once_with() + + def test_w_timedelta_days(self): + dummy_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0) + utc_seconds = self._utc_seconds(dummy_utcnow) + expiration_as_delta = datetime.timedelta(days=1) + + patch = mock.patch( + 'google.cloud.storage._signing.NOW', + return_value=dummy_utcnow) + with patch as utcnow: + result = self._call_fut(expiration_as_delta) + + self.assertEqual(result, utc_seconds + 86400) + utcnow.assert_called_once_with() + + +class Test_get_signed_query_params(unittest.TestCase): + + @staticmethod + def _call_fut(credentials, expiration, string_to_sign): + from google.cloud.storage._signing import get_signed_query_params + + return get_signed_query_params( + credentials, expiration, string_to_sign) + + def test_it(self): + sig_bytes = b'DEADBEEF' + account_name = mock.sentinel.service_account_email + credentials = _make_credentials( + signing=True, signer_email=account_name) + credentials.sign_bytes.return_value = sig_bytes + expiration = 100 + string_to_sign = 'dummy_signature' + result = self._call_fut( + credentials, expiration, string_to_sign) + + expected = { + 'GoogleAccessId': account_name, + 'Expires': str(expiration), + 'Signature': base64.b64encode(sig_bytes), + } + self.assertEqual(result, expected) + credentials.sign_bytes.assert_called_once_with(string_to_sign) + + +class 
Test_generate_signed_url(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.storage._signing import generate_signed_url + + return generate_signed_url(*args, **kwargs) + + def _generate_helper(self, response_type=None, response_disposition=None, + generation=None): + endpoint = 'http://api.example.com' + resource = '/name/path' + credentials = _make_credentials( + signing=True, signer_email='service@example.com') + credentials.sign_bytes.return_value = b'DEADBEEF' + signed = base64.b64encode(credentials.sign_bytes.return_value) + signed = signed.decode('ascii') + + expiration = 1000 + url = self._call_fut( + credentials, + resource, + expiration, + api_access_endpoint=endpoint, + response_type=response_type, + response_disposition=response_disposition, + generation=generation, + ) + + # Check the mock was called. + string_to_sign = '\n'.join([ + 'GET', + '', + '', + str(expiration), + resource, + ]) + credentials.sign_bytes.assert_called_once_with(string_to_sign) + + scheme, netloc, path, qs, frag = urllib_parse.urlsplit(url) + self.assertEqual(scheme, 'http') + self.assertEqual(netloc, 'api.example.com') + self.assertEqual(path, resource) + self.assertEqual(frag, '') + + # Check the URL parameters. + params = urllib_parse.parse_qs(qs) + expected_params = { + 'GoogleAccessId': [credentials.signer_email], + 'Expires': [str(expiration)], + 'Signature': [signed], + } + if response_type is not None: + expected_params['response-content-type'] = [response_type] + if response_disposition is not None: + expected_params['response-content-disposition'] = [ + response_disposition] + if generation is not None: + expected_params['generation'] = [generation] + self.assertEqual(params, expected_params) + + def test_w_expiration_int(self): + self._generate_helper() + + def test_w_custom_fields(self): + response_type = 'text/plain' + response_disposition = 'attachment; filename=blob.png' + generation = '123' + self._generate_helper(response_type=response_type, + response_disposition=response_disposition, + generation=generation) + + def test_with_google_credentials(self): + resource = '/name/path' + credentials = _make_credentials() + expiration = int(time.time() + 5) + self.assertRaises(AttributeError, self._call_fut, credentials, + resource=resource, expiration=expiration) + + +def _make_credentials(signing=False, signer_email=None): + import google.auth.credentials + + if signing: + credentials = mock.Mock(spec=google.auth.credentials.Signing) + credentials.signer_email = signer_email + return credentials + else: + return mock.Mock(spec=google.auth.credentials.Credentials) From 7cc07b544d636a6b1590c9c1b97b5b80da178a26 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 25 Jul 2017 16:51:34 -0400 Subject: [PATCH 54/62] Unbind transaction from session on commit/rollback. (#3669) Closes #3014. 
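The ownership change is easiest to see in isolation: after this patch the transaction unbinds itself from its session when it completes, rather than ``Session.run_in_transaction`` doing the cleanup. A self-contained sketch of the pattern, using stand-in classes rather than the real ``google.cloud.spanner`` API (the real methods also issue the commit/rollback RPCs first):

    class _Session(object):
        # Class-level default; ``del`` on the instance attribute below
        # makes lookups fall back to this, so reads see ``None``.
        _transaction = None

        def transaction(self):
            self._transaction = _Transaction(self)
            return self._transaction


    class _Transaction(object):
        def __init__(self, session):
            self._session = session
            self.committed = None

        def commit(self):
            # ... commit RPC and timestamp bookkeeping happen here ...
            # Unbind from the owning session, mirroring the
            # ``del self._session._transaction`` added in the diff below.
            del self._session._transaction


    session = _Session()
    transaction = session.transaction()
    transaction.commit()
    assert session._transaction is None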
--- spanner/google/cloud/spanner/session.py | 2 -- spanner/google/cloud/spanner/transaction.py | 2 ++ spanner/tests/unit/test_transaction.py | 10 ++++++++-- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/spanner/google/cloud/spanner/session.py b/spanner/google/cloud/spanner/session.py index 45baffa92d43..f25abdd6261a 100644 --- a/spanner/google/cloud/spanner/session.py +++ b/spanner/google/cloud/spanner/session.py @@ -302,7 +302,6 @@ def run_in_transaction(self, func, *args, **kw): continue except Exception: txn.rollback() - del self._transaction raise try: @@ -312,7 +311,6 @@ def run_in_transaction(self, func, *args, **kw): del self._transaction else: committed = txn.committed - del self._transaction return committed diff --git a/spanner/google/cloud/spanner/transaction.py b/spanner/google/cloud/spanner/transaction.py index af2140896830..7c0272d41132 100644 --- a/spanner/google/cloud/spanner/transaction.py +++ b/spanner/google/cloud/spanner/transaction.py @@ -93,6 +93,7 @@ def rollback(self): options = _options_with_prefix(database.name) api.rollback(self._session.name, self._id, options=options) self._rolled_back = True + del self._session._transaction def commit(self): """Commit mutations to the database. @@ -114,6 +115,7 @@ def commit(self): transaction_id=self._id, options=options) self.committed = _pb_timestamp_to_datetime( response.commit_timestamp) + del self._session._transaction return self.committed def __enter__(self): diff --git a/spanner/tests/unit/test_transaction.py b/spanner/tests/unit/test_transaction.py index 997f4d5153c8..973aeedb179d 100644 --- a/spanner/tests/unit/test_transaction.py +++ b/spanner/tests/unit/test_transaction.py @@ -42,8 +42,10 @@ def _getTargetClass(self): return Transaction - def _make_one(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + def _make_one(self, session, *args, **kwargs): + transaction = self._getTargetClass()(session, *args, **kwargs) + session._transaction = transaction + return transaction def test_ctor_defaults(self): session = _Session() @@ -208,6 +210,7 @@ def test_rollback_ok(self): transaction.rollback() self.assertTrue(transaction._rolled_back) + self.assertIsNone(session._transaction) session_id, txn_id, options = api._rolled_back self.assertEqual(session_id, session.name) @@ -290,6 +293,7 @@ def test_commit_ok(self): transaction.commit() self.assertEqual(transaction.committed, now) + self.assertIsNone(session._transaction) session_id, mutations, txn_id, options = api._committed self.assertEqual(session_id, session.name) @@ -368,6 +372,8 @@ class _Database(object): class _Session(object): + _transaction = None + def __init__(self, database=None, name=TestTransaction.SESSION_NAME): self._database = database self.name = name From 23e6c90b25995e12bb078fa1b0540b08c1bf4c22 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Jul 2017 14:13:44 -0700 Subject: [PATCH 55/62] Removing `get_credentials()` from `core`. (#3667) * Removing `get_credentials()` from `core`. In the process also: - Slight re-org on `nox.py` config (to pass posargs) for `core` and `datastore` - Getting rid of last usage of `_Monkey` in datastore This is part of `@jonparrott`'s effort to slim down / stabilize `core`. * Removing `google.cloud.credentials` module from docs. 
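The replacement applied throughout the diff below is mechanical; a minimal sketch of the before and after (assuming ``google-auth`` is installed and Application Default Credentials are configured in the environment):

    import google.auth

    # Before this patch, via the now-removed helper:
    #     from google.cloud.credentials import get_credentials
    #     credentials = get_credentials()

    # After: google.auth.default() returns a (credentials, project)
    # pair, so callers unpack it and discard the inferred project.
    credentials, _ = google.auth.default()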
--- bigtable/google/cloud/bigtable/client.py | 4 +- bigtable/tests/unit/test_client.py | 13 ++-- core/google/cloud/client.py | 4 +- core/google/cloud/credentials.py | 30 --------- core/nox.py | 23 +++++-- core/tests/unit/test_client.py | 83 ++++++++++-------------- core/tests/unit/test_credentials.py | 34 ---------- datastore/nox.py | 15 +++-- datastore/tests/unit/test_client.py | 18 ++--- datastore/tests/unit/test_query.py | 23 +++---- docs/core/modules.rst | 7 -- spanner/google/cloud/spanner/client.py | 4 +- spanner/tests/unit/test_client.py | 12 ++-- 13 files changed, 95 insertions(+), 175 deletions(-) delete mode 100644 core/google/cloud/credentials.py delete mode 100644 core/tests/unit/test_credentials.py diff --git a/bigtable/google/cloud/bigtable/client.py b/bigtable/google/cloud/bigtable/client.py index 86ee7173c917..62877371a945 100644 --- a/bigtable/google/cloud/bigtable/client.py +++ b/bigtable/google/cloud/bigtable/client.py @@ -31,6 +31,7 @@ import os +import google.auth import google.auth.credentials from google.gax.utils import metrics from google.longrunning import operations_grpc @@ -40,7 +41,6 @@ from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.client import _ClientFactoryMixin from google.cloud.client import _ClientProjectMixin -from google.cloud.credentials import get_credentials from google.cloud.environment_vars import BIGTABLE_EMULATOR from google.cloud.bigtable import __version__ @@ -211,7 +211,7 @@ def __init__(self, project=None, credentials=None, read_only=False, admin=False, user_agent=DEFAULT_USER_AGENT): _ClientProjectMixin.__init__(self, project=project) if credentials is None: - credentials = get_credentials() + credentials, _ = google.auth.default() if read_only and admin: raise ValueError('A read-only client cannot also perform' diff --git a/bigtable/tests/unit/test_client.py b/bigtable/tests/unit/test_client.py index 17656be60c00..c3ab8d1ed888 100644 --- a/bigtable/tests/unit/test_client.py +++ b/bigtable/tests/unit/test_client.py @@ -360,20 +360,19 @@ def test_constructor_both_admin_and_read_only(self): read_only=True) def test_constructor_implicit_credentials(self): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import client as MUT + from google.cloud.bigtable.client import DATA_SCOPE creds = _make_credentials() - expected_scopes = [MUT.DATA_SCOPE] - - def mock_get_credentials(): - return creds + expected_scopes = [DATA_SCOPE] - with _Monkey(MUT, get_credentials=mock_get_credentials): + patch = mock.patch( + 'google.auth.default', return_value=(creds, None)) + with patch as default: self._constructor_test_helper( None, None, expected_creds=creds.with_scopes.return_value) + default.assert_called_once_with() creds.with_scopes.assert_called_once_with(expected_scopes) def test_constructor_credentials_wo_create_scoped(self): diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py index 5906ab5ed108..468cf9e40a52 100644 --- a/core/google/cloud/client.py +++ b/core/google/cloud/client.py @@ -21,9 +21,9 @@ import google_auth_httplib2 import six +import google.auth import google.auth.credentials from google.cloud._helpers import _determine_default_project -from google.cloud.credentials import get_credentials from google.oauth2 import service_account @@ -135,7 +135,7 @@ def __init__(self, credentials=None, _http=None): credentials, google.auth.credentials.Credentials)): raise ValueError(_GOOGLE_AUTH_CREDENTIALS_HELP) if credentials is None and _http is None: - credentials = get_credentials() + 
credentials, _ = google.auth.default() self._credentials = google.auth.credentials.with_scopes_if_required( credentials, self.SCOPE) self._http_internal = _http diff --git a/core/google/cloud/credentials.py b/core/google/cloud/credentials.py deleted file mode 100644 index b434cac2f1e7..000000000000 --- a/core/google/cloud/credentials.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2014 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""A simple wrapper around the OAuth2 credentials library.""" - -import google.auth - - -def get_credentials(): - """Gets credentials implicitly from the current environment. - - Uses :func:`google.auth.default()`. - - :rtype: :class:`google.auth.credentials.Credentials`, - :returns: A new credentials instance corresponding to the implicit - environment. - """ - credentials, _ = google.auth.default() - return credentials diff --git a/core/nox.py b/core/nox.py index 48b55332283e..1dca10eb9b69 100644 --- a/core/nox.py +++ b/core/nox.py @@ -13,6 +13,7 @@ # limitations under the License. from __future__ import absolute_import +import os import nox @@ -29,16 +30,26 @@ def unit_tests(session, python_version): session.virtualenv_dirname = 'unit-' + python_version # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', - 'grpcio >= 1.0.2') + session.install( + 'mock', + 'pytest', + 'pytest-cov', + 'grpcio >= 1.0.2', + ) session.install('-e', '.') # Run py.test against the unit tests. 
session.run( - 'py.test', '--quiet', - '--cov=google.cloud', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + 'py.test', + '--quiet', + '--cov=google.cloud', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + os.path.join('tests', 'unit'), + *session.posargs ) diff --git a/core/tests/unit/test_client.py b/core/tests/unit/test_client.py index 14eac68abee3..25667712c69a 100644 --- a/core/tests/unit/test_client.py +++ b/core/tests/unit/test_client.py @@ -59,37 +59,31 @@ def test_unpickleable(self): with self.assertRaises(pickle.PicklingError): pickle.dumps(client_obj) - def test_ctor_defaults(self): - from google.cloud._testing import _Monkey - from google.cloud import client - - CREDENTIALS = _make_credentials() - FUNC_CALLS = [] - - def mock_get_credentials(): - FUNC_CALLS.append('get_credentials') - return CREDENTIALS + def test_constructor_defaults(self): + credentials = _make_credentials() - with _Monkey(client, get_credentials=mock_get_credentials): + patch = mock.patch( + 'google.auth.default', return_value=(credentials, None)) + with patch as default: client_obj = self._make_one() - self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertIs(client_obj._credentials, credentials) self.assertIsNone(client_obj._http_internal) - self.assertEqual(FUNC_CALLS, ['get_credentials']) + default.assert_called_once_with() - def test_ctor_explicit(self): - CREDENTIALS = _make_credentials() - HTTP = object() - client_obj = self._make_one(credentials=CREDENTIALS, _http=HTTP) + def test_constructor_explicit(self): + credentials = _make_credentials() + http = mock.sentinel.http + client_obj = self._make_one(credentials=credentials, _http=http) - self.assertIs(client_obj._credentials, CREDENTIALS) - self.assertIs(client_obj._http_internal, HTTP) + self.assertIs(client_obj._credentials, credentials) + self.assertIs(client_obj._http_internal, http) - def test_ctor_bad_credentials(self): - CREDENTIALS = object() + def test_constructor_bad_credentials(self): + credentials = mock.sentinel.credentials with self.assertRaises(ValueError): - self._make_one(credentials=CREDENTIALS) + self._make_one(credentials=credentials) def test_from_service_account_json(self): from google.cloud import _helpers @@ -162,34 +156,27 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_ctor_defaults(self): - from google.cloud._testing import _Monkey - from google.cloud import client - - PROJECT = 'PROJECT' - CREDENTIALS = _make_credentials() - FUNC_CALLS = [] - - def mock_determine_proj(project): - FUNC_CALLS.append((project, '_determine_default_project')) - return PROJECT + def test_constructor_defaults(self): + credentials = _make_credentials() + patch1 = mock.patch( + 'google.auth.default', return_value=(credentials, None)) - def mock_get_credentials(): - FUNC_CALLS.append('get_credentials') - return CREDENTIALS + project = 'prahj-ekt' + patch2 = mock.patch( + 'google.cloud.client._determine_default_project', + return_value=project) - with _Monkey(client, get_credentials=mock_get_credentials, - _determine_default_project=mock_determine_proj): - client_obj = self._make_one() + with patch1 as default: + with patch2 as _determine_default_project: + client_obj = self._make_one() - self.assertEqual(client_obj.project, PROJECT) - self.assertIs(client_obj._credentials, CREDENTIALS) + self.assertEqual(client_obj.project, 
project) + self.assertIs(client_obj._credentials, credentials) self.assertIsNone(client_obj._http_internal) - self.assertEqual( - FUNC_CALLS, - [(None, '_determine_default_project'), 'get_credentials']) + default.assert_called_once_with() + _determine_default_project.assert_called_once_with(None) - def test_ctor_missing_project(self): + def test_constructor_missing_project(self): from google.cloud._testing import _Monkey from google.cloud import client @@ -204,7 +191,7 @@ def mock_determine_proj(project): self.assertEqual(FUNC_CALLS, [(None, '_determine_default_project')]) - def test_ctor_w_invalid_project(self): + def test_constructor_w_invalid_project(self): CREDENTIALS = _make_credentials() HTTP = object() with self.assertRaises(ValueError): @@ -227,11 +214,11 @@ def _explicit_ctor_helper(self, project): self.assertIs(client_obj._credentials, CREDENTIALS) self.assertIs(client_obj._http_internal, HTTP) - def test_ctor_explicit_bytes(self): + def test_constructor_explicit_bytes(self): PROJECT = b'PROJECT' self._explicit_ctor_helper(PROJECT) - def test_ctor_explicit_unicode(self): + def test_constructor_explicit_unicode(self): PROJECT = u'PROJECT' self._explicit_ctor_helper(PROJECT) diff --git a/core/tests/unit/test_credentials.py b/core/tests/unit/test_credentials.py deleted file mode 100644 index 3b313c1dc1d6..000000000000 --- a/core/tests/unit/test_credentials.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2014 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -class Test_get_credentials(unittest.TestCase): - - def _call_fut(self): - from google.cloud import credentials - - return credentials.get_credentials() - - def test_it(self): - with mock.patch('google.auth.default', autospec=True) as default: - default.return_value = ( - mock.sentinel.credentials, mock.sentinel.project) - found = self._call_fut() - - self.assertIs(found, mock.sentinel.credentials) - default.assert_called_once_with() diff --git a/datastore/nox.py b/datastore/nox.py index 2cf2186aa45a..f93b02944631 100644 --- a/datastore/nox.py +++ b/datastore/nox.py @@ -38,10 +38,17 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. 
- session.run('py.test', '--quiet', - '--cov=google.cloud.datastore', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', + '--quiet', + '--cov=google.cloud.datastore', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + os.path.join('tests', 'unit'), + *session.posargs ) diff --git a/datastore/tests/unit/test_client.py b/datastore/tests/unit/test_client.py index 9824e06b73ad..a03bbe8b710f 100644 --- a/datastore/tests/unit/test_client.py +++ b/datastore/tests/unit/test_client.py @@ -148,22 +148,16 @@ def test_constructor_w_implicit_inputs(self): other = 'other' creds = _make_credentials() - default_called = [] - - def fallback_mock(project): - default_called.append(project) - return project or other klass = self._get_target_class() patch1 = mock.patch( 'google.cloud.datastore.client._determine_default_project', - new=fallback_mock) + return_value=other) patch2 = mock.patch( - 'google.cloud.client.get_credentials', - return_value=creds) + 'google.auth.default', return_value=(creds, None)) - with patch1: - with patch2: + with patch1 as _determine_default_project: + with patch2 as default: client = klass() self.assertEqual(client.project, other) @@ -174,7 +168,9 @@ def fallback_mock(project): self.assertIsNone(client.current_batch) self.assertIsNone(client.current_transaction) - self.assertEqual(default_called, [None]) + + default.assert_called_once_with() + _determine_default_project.assert_called_once_with(None) def test_constructor_w_explicit_inputs(self): from google.cloud.datastore.client import _DATASTORE_BASE_URL diff --git a/datastore/tests/unit/test_query.py b/datastore/tests/unit/test_query.py index b361ec25a42f..26c1b6cc0831 100644 --- a/datastore/tests/unit/test_query.py +++ b/datastore/tests/unit/test_query.py @@ -550,21 +550,14 @@ def _call_fut(self, iterator, entity_pb): return _item_to_entity(iterator, entity_pb) def test_it(self): - from google.cloud._testing import _Monkey - from google.cloud.datastore import helpers - - result = object() - entities = [] - - def mocked(entity_pb): - entities.append(entity_pb) - return result - - entity_pb = object() - with _Monkey(helpers, entity_from_protobuf=mocked): - self.assertIs(result, self._call_fut(None, entity_pb)) - - self.assertEqual(entities, [entity_pb]) + entity_pb = mock.sentinel.entity_pb + patch = mock.patch( + 'google.cloud.datastore.helpers.entity_from_protobuf') + with patch as entity_from_protobuf: + result = self._call_fut(None, entity_pb) + self.assertIs(result, entity_from_protobuf.return_value) + + entity_from_protobuf.assert_called_once_with(entity_pb) class Test__pb_from_query(unittest.TestCase): diff --git a/docs/core/modules.rst b/docs/core/modules.rst index 195a79c5abb2..a1cdbc456de5 100644 --- a/docs/core/modules.rst +++ b/docs/core/modules.rst @@ -9,13 +9,6 @@ Base Client :show-inheritance: :inherited-members: -Credentials Helpers -~~~~~~~~~~~~~~~~~~~ - -.. 
automodule:: google.cloud.credentials - :members: - :show-inheritance: - Exceptions ~~~~~~~~~~ diff --git a/spanner/google/cloud/spanner/client.py b/spanner/google/cloud/spanner/client.py index 875238aed2bc..b701b017abb0 100644 --- a/spanner/google/cloud/spanner/client.py +++ b/spanner/google/cloud/spanner/client.py @@ -24,6 +24,7 @@ :class:`~google.cloud.spanner.database.Database` """ +import google.auth import google.auth.credentials from google.gax import INITIAL_PAGE # pylint: disable=line-too-long @@ -36,7 +37,6 @@ from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.client import _ClientFactoryMixin from google.cloud.client import _ClientProjectMixin -from google.cloud.credentials import get_credentials from google.cloud.iterator import GAXIterator from google.cloud.spanner import __version__ from google.cloud.spanner._helpers import _options_with_prefix @@ -109,7 +109,7 @@ def __init__(self, project=None, credentials=None, _ClientProjectMixin.__init__(self, project=project) if credentials is None: - credentials = get_credentials() + credentials, _ = google.auth.default() scopes = [ SPANNER_ADMIN_SCOPE, diff --git a/spanner/tests/unit/test_client.py b/spanner/tests/unit/test_client.py index c71429c22535..e5e90fd6b7ab 100644 --- a/spanner/tests/unit/test_client.py +++ b/spanner/tests/unit/test_client.py @@ -88,19 +88,17 @@ def test_constructor_custom_user_agent_and_timeout(self): user_agent=CUSTOM_USER_AGENT) def test_constructor_implicit_credentials(self): - from google.cloud._testing import _Monkey - from google.cloud.spanner import client as MUT - creds = _make_credentials() - def mock_get_credentials(): - return creds - - with _Monkey(MUT, get_credentials=mock_get_credentials): + patch = mock.patch( + 'google.auth.default', return_value=(creds, None)) + with patch as default: self._constructor_test_helper( None, None, expected_creds=creds.with_scopes.return_value) + default.assert_called_once_with() + def test_constructor_credentials_wo_create_scoped(self): creds = _make_credentials() expected_scopes = None From df5e6bb05cd4a103bffe9f0b99534f2e9412e30d Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 26 Jul 2017 11:16:59 -0700 Subject: [PATCH 56/62] NL GAPIC (#3679) --- docs/language/client.rst | 6 - docs/language/document.rst | 6 - docs/language/gapic/v1/api.rst | 6 + docs/language/gapic/v1/types.rst | 5 + docs/language/gapic/v1beta2/api.rst | 6 + docs/language/gapic/v1beta2/types.rst | 5 + docs/language/responses.rst | 37 - docs/language/usage.rst | 316 +- docs/vision/gapic/{ => v1}/api.rst | 0 docs/vision/gapic/{ => v1}/types.rst | 0 docs/vision/index.rst | 12 +- language/google/cloud/gapic/__init__.py | 1 + .../google/cloud/gapic/language/__init__.py | 1 + .../cloud/gapic/language/v1/__init__.py | 0 .../google/cloud/gapic/language/v1/enums.py | 516 +++ .../language/v1/language_service_client.py | 290 ++ .../v1/language_service_client_config.json | 46 + .../cloud/gapic/language/v1beta2/__init__.py | 0 .../cloud/gapic/language/v1beta2/enums.py | 516 +++ .../v1beta2/language_service_client.py | 326 ++ .../language_service_client_config.json | 51 + language/google/cloud/language/__init__.py | 37 +- language/google/cloud/language/client.py | 10 + language/google/cloud/language_v1/__init__.py | 30 + language/google/cloud/language_v1/types.py | 30 + .../google/cloud/language_v1beta2/__init__.py | 30 + .../google/cloud/language_v1beta2/types.py | 30 + language/google/cloud/proto/__init__.py | 1 + .../google/cloud/proto/language/__init__.py | 1 + 
.../cloud/proto/language/v1/__init__.py | 1 + .../proto/language/v1/language_service_pb2.py | 2647 +++++++++++++++ .../language/v1/language_service_pb2_grpc.py | 104 + .../cloud/proto/language/v1beta2/__init__.py | 1 + .../language/v1beta2/language_service_pb2.py | 2843 +++++++++++++++++ .../v1beta2/language_service_pb2_grpc.py | 122 + language/setup.py | 10 + .../gapic/v1/language_service_smoke_test.py | 30 + .../v1/test_language_service_client_v1.py | 232 ++ .../v1beta2/language_service_smoke_test.py | 30 + .../test_language_service_client_v1beta2.py | 283 ++ vision/setup.py | 2 +- 41 files changed, 8391 insertions(+), 229 deletions(-) delete mode 100644 docs/language/client.rst delete mode 100644 docs/language/document.rst create mode 100644 docs/language/gapic/v1/api.rst create mode 100644 docs/language/gapic/v1/types.rst create mode 100644 docs/language/gapic/v1beta2/api.rst create mode 100644 docs/language/gapic/v1beta2/types.rst delete mode 100644 docs/language/responses.rst rename docs/vision/gapic/{ => v1}/api.rst (100%) rename docs/vision/gapic/{ => v1}/types.rst (100%) create mode 100644 language/google/cloud/gapic/__init__.py create mode 100644 language/google/cloud/gapic/language/__init__.py create mode 100644 language/google/cloud/gapic/language/v1/__init__.py create mode 100644 language/google/cloud/gapic/language/v1/enums.py create mode 100644 language/google/cloud/gapic/language/v1/language_service_client.py create mode 100644 language/google/cloud/gapic/language/v1/language_service_client_config.json create mode 100644 language/google/cloud/gapic/language/v1beta2/__init__.py create mode 100644 language/google/cloud/gapic/language/v1beta2/enums.py create mode 100644 language/google/cloud/gapic/language/v1beta2/language_service_client.py create mode 100644 language/google/cloud/gapic/language/v1beta2/language_service_client_config.json create mode 100644 language/google/cloud/language_v1/__init__.py create mode 100644 language/google/cloud/language_v1/types.py create mode 100644 language/google/cloud/language_v1beta2/__init__.py create mode 100644 language/google/cloud/language_v1beta2/types.py create mode 100644 language/google/cloud/proto/__init__.py create mode 100644 language/google/cloud/proto/language/__init__.py create mode 100644 language/google/cloud/proto/language/v1/__init__.py create mode 100644 language/google/cloud/proto/language/v1/language_service_pb2.py create mode 100644 language/google/cloud/proto/language/v1/language_service_pb2_grpc.py create mode 100644 language/google/cloud/proto/language/v1beta2/__init__.py create mode 100644 language/google/cloud/proto/language/v1beta2/language_service_pb2.py create mode 100644 language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py create mode 100644 language/tests/gapic/v1/language_service_smoke_test.py create mode 100644 language/tests/gapic/v1/test_language_service_client_v1.py create mode 100644 language/tests/gapic/v1beta2/language_service_smoke_test.py create mode 100644 language/tests/gapic/v1beta2/test_language_service_client_v1beta2.py diff --git a/docs/language/client.rst b/docs/language/client.rst deleted file mode 100644 index 310e7b5bf828..000000000000 --- a/docs/language/client.rst +++ /dev/null @@ -1,6 +0,0 @@ -Natural Language Client -======================= - -.. 
automodule:: google.cloud.language.client - :members: - :show-inheritance: diff --git a/docs/language/document.rst b/docs/language/document.rst deleted file mode 100644 index e879b11e590a..000000000000 --- a/docs/language/document.rst +++ /dev/null @@ -1,6 +0,0 @@ -Document -~~~~~~~~ - -.. automodule:: google.cloud.language.document - :members: - :show-inheritance: diff --git a/docs/language/gapic/v1/api.rst b/docs/language/gapic/v1/api.rst new file mode 100644 index 000000000000..2c5fd4fd76ea --- /dev/null +++ b/docs/language/gapic/v1/api.rst @@ -0,0 +1,6 @@ +Natural Language Client API +=========================== + +.. automodule:: google.cloud.language_v1 + :members: + :inherited-members: diff --git a/docs/language/gapic/v1/types.rst b/docs/language/gapic/v1/types.rst new file mode 100644 index 000000000000..90d27a4b96fb --- /dev/null +++ b/docs/language/gapic/v1/types.rst @@ -0,0 +1,5 @@ +Natural Language Client Types +============================= + +.. automodule:: google.cloud.language_v1.types + :members: diff --git a/docs/language/gapic/v1beta2/api.rst b/docs/language/gapic/v1beta2/api.rst new file mode 100644 index 000000000000..330d7e6e7a78 --- /dev/null +++ b/docs/language/gapic/v1beta2/api.rst @@ -0,0 +1,6 @@ +Natural Language Beta Client API +================================ + +.. automodule:: google.cloud.language_v1beta2 + :members: + :inherited-members: diff --git a/docs/language/gapic/v1beta2/types.rst b/docs/language/gapic/v1beta2/types.rst new file mode 100644 index 000000000000..d9a7eb171f00 --- /dev/null +++ b/docs/language/gapic/v1beta2/types.rst @@ -0,0 +1,5 @@ +Natural Language Beta Client Types +================================== + +.. automodule:: google.cloud.language_v1beta2.types + :members: diff --git a/docs/language/responses.rst b/docs/language/responses.rst deleted file mode 100644 index 5584cbcdcfab..000000000000 --- a/docs/language/responses.rst +++ /dev/null @@ -1,37 +0,0 @@ -Natural Language Response Classes -================================= - -Responses -~~~~~~~~~ - -.. automodule:: google.cloud.language.api_responses - :members: - :show-inheritance: - -Sentences -~~~~~~~~~ - -.. automodule:: google.cloud.language.sentence - :members: - :show-inheritance: - -Entity -~~~~~~ - -.. automodule:: google.cloud.language.entity - :members: - :show-inheritance: - -Sentiment -~~~~~~~~~ - -.. automodule:: google.cloud.language.sentiment - :members: - :show-inheritance: - -Syntax -~~~~~~ - -.. automodule:: google.cloud.language.syntax - :members: - :show-inheritance: diff --git a/docs/language/usage.rst b/docs/language/usage.rst index 2a8c9ddba589..31d4bb20b95c 100644 --- a/docs/language/usage.rst +++ b/docs/language/usage.rst @@ -1,14 +1,6 @@ Natural Language ================ -.. toctree:: - :maxdepth: 2 - :hidden: - - client - document - responses - The `Google Natural Language`_ API can be used to reveal the structure and meaning of text via powerful machine learning models. You can use it to extract information about @@ -21,40 +13,43 @@ with your document storage on Google Cloud Storage. .. _Google Natural Language: https://cloud.google.com/natural-language/docs/getting-started -Client ------- -:class:`~google.cloud.language.client.Client` objects provide a -means to configure your application. Each instance holds -an authenticated connection to the Natural Language service. 
+******************************** +Authentication and Configuration +******************************** -For an overview of authentication in ``google-cloud-python``, see -:doc:`/core/auth`. +- For an overview of authentication in ``google-cloud-python``, + see :doc:`/core/auth`. -Assuming your environment is set up as described in that document, -create an instance of :class:`~google.cloud.language.client.Client`. +- In addition to any authentication configuration, you should also set the + :envvar:`GOOGLE_CLOUD_PROJECT` environment variable for the project you'd + like to interact with. If the :envvar:`GOOGLE_CLOUD_PROJECT` environment + variable is not present, the project ID from JSON file credentials is used. - .. code-block:: python + If you are using Google App Engine or Google Compute Engine + this will be detected automatically. - >>> from google.cloud import language - >>> client = language.Client() +- After configuring your environment, create a + :class:`~google.cloud.language_v1.LanguageServiceClient`. -By default the ``language`` is ``'en-US'`` and the ``encoding`` is -UTF-8. To over-ride these values: +.. code-block:: python - .. code-block:: python + >>> from google.cloud import language + >>> client = language.LanguageServiceClient() - >>> document = client.document_from_text( - ... text_content, language='es', encoding=language.Encoding.UTF16) +or pass in ``credentials`` explicitly. +.. code-block:: python + + >>> from google.cloud import language + >>> client = language.LanguageServiceClient( + ... credentials=creds, + ... ) -The encoding can be one of -:attr:`Encoding.UTF8 `, -:attr:`Encoding.UTF16 `, or -:attr:`Encoding.UTF32 `. -Methods -------- +********* +Documents +********* The Google Natural Language API has three supported methods @@ -62,109 +57,90 @@ The Google Natural Language API has three supported methods - `analyzeSentiment`_ - `annotateText`_ -and each method uses a `Document`_ for representing text. To -create a :class:`~google.cloud.language.document.Document`, +and each method uses a :class:`~.language_v1.types.Document` for representing +text. .. code-block:: python - >>> text_content = ( - ... 'Google, headquartered in Mountain View, unveiled the ' - ... 'new Android phone at the Consumer Electronic Show. ' - ... 'Sundar Pichai said in his keynote that users love ' - ... 'their new Android phones.') - >>> document = client.document_from_text(text_content) + >>> document = language.types.Document( + ... content='Google, headquartered in Mountain View, unveiled the ' + ... 'new Android phone at the Consumer Electronic Show. ' + ... 'Sundar Pichai said in his keynote that users love ' + ... 'their new Android phones.', + ... language='en', + ... type='PLAIN_TEXT', + ... ) -By using :meth:`~google.cloud.language.client.Client.document_from_text`, -the document's type is plain text: - - .. code-block:: python - - >>> document.doc_type == language.Document.PLAIN_TEXT - True The document's language defaults to ``None``, which will cause the API to auto-detect the language. -In addition, the -:meth:`~google.cloud.language.client.Client.document_from_html`, -factory can be used to created an HTML document. In this -method and the from text method, the language can be -over-ridden: +In addition, you can construct an HTML document: .. code-block:: python - >>> html_content = """\ - ... - ... - ... El Tiempo de las Historias</time> - ... </head> - ... <body> - ... <p>La vaca saltó sobre la luna.</p> - ... </body> - ... </html> - ... 
""" - >>> document = client.document_from_html(html_content, - ... language='es') + >>> html_content = """\ + ... <html> + ... <head> + ... <title>El Tiempo de las Historias</time> + ... </head> + ... <body> + ... <p>La vaca saltó sobre la luna.</p> + ... </body> + ... </html> + ... """ + >>> document = language.types.Document( + ... content=html_content, + ... language='es', + ... type='HTML', + ... ) The ``language`` argument can be either ISO-639-1 or BCP-47 language -codes; at the time, only English, Spanish, and Japanese `are supported`_. -However, the ``analyzeSentiment`` method `only supports`_ English text. +codes. The API reference page contains the full list of `supported languages`_. -.. _are supported: https://cloud.google.com/natural-language/docs/ -.. _only supports: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/analyzeSentiment#body.request_body.FIELDS.document +.. _supported languages: https://cloud.google.com/natural-language/docs/languages -The document type (``doc_type``) value can be one of -:attr:`Document.PLAIN_TEXT <google.cloud.language.document.Document.PLAIN_TEXT>` or -:attr:`Document.HTML <google.cloud.language.document.Document.HTML>`. In addition to supplying the text / HTML content, a document can refer -to content stored in `Google Cloud Storage`_. We can use the -:meth:`~google.cloud.language.client.Client.document_from_url` method: - - .. code-block:: python - - >>> gcs_url = 'gs://my-text-bucket/sentiment-me.txt' - >>> document = client.document_from_url( - ... gcs_url, doc_type=language.Document.HTML) - >>> document.gcs_url == gcs_url - True - >>> document.doc_type == language.Document.PLAIN_TEXT - True - -The document type can be specified with the ``doc_type`` argument: +to content stored in `Google Cloud Storage`_. .. code-block:: python - >>> document = client.document_from_url( - ... gcs_url, doc_type=language.Document.HTML) + >>> document = language.types.Document( + ... gcs_content_uri='gs://my-text-bucket/sentiment-me.txt', + ... type=language.enums.HTML, + ... ) .. _analyzeEntities: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/analyzeEntities .. _analyzeSentiment: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/analyzeSentiment .. _annotateText: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/annotateText -.. _Document: https://cloud.google.com/natural-language/reference/rest/v1beta1/Document .. _Google Cloud Storage: https://cloud.google.com/storage/ +**************** Analyze Entities ----------------- +**************** -The :meth:`~google.cloud.language.document.Document.analyze_entities` method -finds named entities (i.e. proper names) in the text and returns them -as a :class:`list` of :class:`~google.cloud.language.entity.Entity` objects. -Each entity has a corresponding type, salience (prominence), associated -metadata and other properties. +The :meth:`~.language_v1.LanguageServiceClient.analyze_entities` +method finds named entities (i.e. proper names) in the text. This method +returns a :class:`~.language_v1.types.AnalyzeEntitiesResponse`. .. code-block:: python - >>> text_content = ("Michelangelo Caravaggio, Italian painter, is " - ... "known for 'The Calling of Saint Matthew'.") - >>> document = client.document_from_text(text_content) - >>> entity_response = document.analyze_entities() - >>> for entity in entity_response.entities: + >>> document = language.types.Document( + ... 
+   ...     content='Michelangelo Caravaggio, Italian painter, is '
+   ...             'known for "The Calling of Saint Matthew".',
+   ...     type=language.enums.Document.Type.PLAIN_TEXT,
+   ... )
+   >>> response = client.analyze_entities(
+   ...     document=document,
+   ...     encoding_type='UTF32',
+   ... )
+   >>> for entity in response.entities:
    ...     print('=' * 20)
-   ...     print(' name: %s' % (entity.name,))
-   ...     print(' type: %s' % (entity.entity_type,))
-   ...     print(' metadata: %s' % (entity.metadata,))
-   ...     print(' salience: %s' % (entity.salience,))
+   ...     print(' name: {0}'.format(entity.name))
+   ...     print(' type: {0}'.format(entity.type))
+   ...     print(' metadata: {0}'.format(entity.metadata))
+   ...     print(' salience: {0}'.format(entity.salience))
    ====================
    name: Michelangelo Caravaggio
    type: PERSON
    metadata: {'wikipedia_url': 'http://en.wikipedia.org/wiki/Caravaggio'}
    salience: 0.038798928

+.. note::
+
+    It is recommended to send an ``encoding_type`` argument to Natural
+    Language methods, so they provide useful offsets for the data they return.
+    While the correct value varies by environment, in Python you *usually*
+    want ``UTF32``.
+
+
+*****************
 Analyze Sentiment
------------------
+*****************

-The :meth:`~google.cloud.language.document.Document.analyze_sentiment` method
-analyzes the sentiment of the provided text and returns a
-:class:`~google.cloud.language.sentiment.Sentiment`. Currently, this method
-only supports English text.
+The :meth:`~.language_v1.LanguageServiceClient.analyze_sentiment` method
+analyzes the sentiment of the provided text. This method returns a
+:class:`~.language_v1.types.AnalyzeSentimentResponse`.

 .. code-block:: python

-   >>> text_content = "Jogging isn't very fun."
-   >>> document = client.document_from_text(text_content)
-   >>> sentiment_response = document.analyze_sentiment()
-   >>> sentiment = sentiment_response.sentiment
+   >>> document = language.types.Document(
+   ...     content='Jogging is not very fun.',
+   ...     type='PLAIN_TEXT',
+   ... )
+   >>> response = client.analyze_sentiment(
+   ...     document=document,
+   ...     encoding_type='UTF32',
+   ... )
+   >>> sentiment = response.document_sentiment
    >>> print(sentiment.score)
    -1
    >>> print(sentiment.magnitude)
    0.8

+.. note::
+
+    It is recommended to send an ``encoding_type`` argument to Natural
+    Language methods, so they provide useful offsets for the data they return.
+    While the correct value varies by environment, in Python you *usually*
+    want ``UTF32``.
+
+
+*************
 Annotate Text
--------------
+*************

-The :meth:`~google.cloud.language.document.Document.annotate_text` method
+The :meth:`~.language_v1.LanguageServiceClient.annotate_text` method
 analyzes a document and is intended for users who are familiar with
-machine learning and need in-depth text features to build upon.
-
-The method returns a named tuple with four entries:
-
-* ``sentences``: A :class:`list` of sentences in the text
-* ``tokens``: A :class:`list` of :class:`~google.cloud.language.syntax.Token`
-  object (e.g.
words, punctuation) -* ``sentiment``: The :class:`~google.cloud.language.sentiment.Sentiment` of - the text (as returned by - :meth:`~google.cloud.language.document.Document.analyze_sentiment`) -* ``entities``: :class:`list` of :class:`~google.cloud.language.entity.Entity` - objects extracted from the text (as returned by - :meth:`~google.cloud.language.document.Document.analyze_entities`) - -By default :meth:`~google.cloud.language.document.Document.annotate_text` has -three arguments ``include_syntax``, ``include_entities`` and -``include_sentiment`` which are all :data:`True`. However, each of these -`Features`_ can be selectively turned off by setting the corresponding -arguments to :data:`False`. - -When ``include_syntax=False``, ``sentences`` and ``tokens`` in the -response is :data:`None`. When ``include_sentiment=False``, ``sentiment`` in -the response is :data:`None`. When ``include_entities=False``, ``entities`` in -the response is :data:`None`. +machine learning and need in-depth text features to build upon. This method +returns a :class:`~.language_v1.types.AnnotateTextResponse`. - .. code-block:: python - >>> text_content = 'The cow jumped over the Moon.' - >>> document = client.document_from_text(text_content) - >>> annotations = document.annotate_text() - >>> # Sentences present if include_syntax=True - >>> print(annotations.sentences) - ['The cow jumped over the Moon.'] - >>> # Tokens present if include_syntax=True - >>> for token in annotations.tokens: - ... msg = '%11s: %s' % (token.part_of_speech, token.text_content) - ... print(msg) - DETERMINER: The - NOUN: cow - VERB: jumped - ADPOSITION: over - DETERMINER: the - NOUN: Moon - PUNCTUATION: . - >>> # Sentiment present if include_sentiment=True - >>> print(annotations.sentiment.score) - 1 - >>> print(annotations.sentiment.magnitude) - 0.1 - >>> # Entities present if include_entities=True - >>> for entity in annotations.entities: - ... print('=' * 20) - ... print(' name: %s' % (entity.name,)) - ... print(' type: %s' % (entity.entity_type,)) - ... print(' metadata: %s' % (entity.metadata,)) - ... print(' salience: %s' % (entity.salience,)) - ==================== - name: Moon - type: LOCATION - metadata: {'wikipedia_url': 'http://en.wikipedia.org/wiki/Natural_satellite'} - salience: 0.11793101 +************* +API Reference +************* + +This package includes clients for multiple versions of the Natural Language +API. By default, you will get ``v1``, the latest GA version. + +.. toctree:: + :maxdepth: 2 + + gapic/v1/api + gapic/v1/types + +If you are interested in beta features ahead of the latest GA, you may +opt-in to the v1.1 beta, which is spelled ``v1beta2``. In order to do this, +you will want to import from ``google.cloud.language_v1beta2`` in lieu of +``google.cloud.language``. + +An API and type reference is provided for the v1.1 beta also: + +.. toctree:: + :maxdepth: 2 + + gapic/v1beta2/api + gapic/v1beta2/types + +.. note:: -.. _Features: https://cloud.google.com/natural-language/docs/reference/rest/v1beta1/documents/annotateText#Features + The client for the beta API is provided on a provisional basis. The API + surface is subject to change, and it is possible that this client will be + deprecated or removed after its features become GA. 
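The rewritten guide above no longer shows a complete ``annotate_text`` call. A minimal sketch against the new ``v1`` surface; the ``Features`` sub-message is assumed to be exposed as ``language.types.AnnotateTextRequest.Features`` (it replaces the old ``include_syntax``/``include_entities``/``include_sentiment`` flags), so treat the exact spelling as illustrative rather than authoritative:

.. code-block:: python

    from google.cloud import language

    client = language.LanguageServiceClient()
    document = language.types.Document(
        content='The cow jumped over the Moon.',
        type='PLAIN_TEXT',
    )
    # Select which annotations to compute for this document.
    features = language.types.AnnotateTextRequest.Features(
        extract_syntax=True,
        extract_entities=True,
        extract_document_sentiment=True,
    )
    response = client.annotate_text(
        document=document,
        features=features,
        encoding_type='UTF32',
    )
    print(response.document_sentiment.score)
    for token in response.tokens:
        print(token.text.content, token.part_of_speech.tag)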
diff --git a/docs/vision/gapic/api.rst b/docs/vision/gapic/v1/api.rst similarity index 100% rename from docs/vision/gapic/api.rst rename to docs/vision/gapic/v1/api.rst diff --git a/docs/vision/gapic/types.rst b/docs/vision/gapic/v1/types.rst similarity index 100% rename from docs/vision/gapic/types.rst rename to docs/vision/gapic/v1/types.rst diff --git a/docs/vision/index.rst b/docs/vision/index.rst index b6d6f17aa2d6..c69240f792bd 100644 --- a/docs/vision/index.rst +++ b/docs/vision/index.rst @@ -33,19 +33,21 @@ Authentication and Configuration this will be detected automatically. - After configuring your environment, create a - :class:`~google.cloud.vision.client.Client`. + :class:`~google.cloud.vision_v1.ImageAnnotatorClient`. .. code-block:: python >>> from google.cloud import vision >>> client = vision.ImageAnnotatorClient() -or pass in ``credentials`` and ``project`` explicitly. +or pass in ``credentials`` explicitly. .. code-block:: python >>> from google.cloud import vision - >>> client = vision.Client(project='my-project', credentials=creds) + >>> client = vision.ImageAnnotatorClient( + ... credentials=creds, + ... ) ***************** @@ -127,5 +129,5 @@ API Reference .. toctree:: :maxdepth: 2 - gapic/api - gapic/types + gapic/v1/api + gapic/v1/types diff --git a/language/google/cloud/gapic/__init__.py b/language/google/cloud/gapic/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/language/google/cloud/gapic/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/gapic/language/__init__.py b/language/google/cloud/gapic/language/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/language/google/cloud/gapic/language/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/gapic/language/v1/__init__.py b/language/google/cloud/gapic/language/v1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/language/google/cloud/gapic/language/v1/enums.py b/language/google/cloud/gapic/language/v1/enums.py new file mode 100644 index 000000000000..2b53e4d913bb --- /dev/null +++ b/language/google/cloud/gapic/language/v1/enums.py @@ -0,0 +1,516 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class EncodingType(object): + """ + Represents the text encoding that the caller uses to process the output. + Providing an ``EncodingType`` is recommended because the API provides the + beginning offsets for various outputs, such as tokens and mentions, and + languages that natively use different text encodings may access offsets + differently. + + Attributes: + NONE (int): If ``EncodingType`` is not specified, encoding-dependent information (such as + ``begin_offset``) will be set at ``-1``. 
+ UTF8 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based + on the UTF-8 encoding of the input. C++ and Go are examples of languages + that use this encoding natively. + UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based + on the UTF-16 encoding of the input. Java and Javascript are examples of + languages that use this encoding natively. + UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based + on the UTF-32 encoding of the input. Python is an example of a language + that uses this encoding natively. + """ + NONE = 0 + UTF8 = 1 + UTF16 = 2 + UTF32 = 3 + + +class Document(object): + class Type(object): + """ + The document types enum. + + Attributes: + TYPE_UNSPECIFIED (int): The content type is not specified. + PLAIN_TEXT (int): Plain text + HTML (int): HTML + """ + TYPE_UNSPECIFIED = 0 + PLAIN_TEXT = 1 + HTML = 2 + + +class Entity(object): + class Type(object): + """ + The type of the entity. + + Attributes: + UNKNOWN (int): Unknown + PERSON (int): Person + LOCATION (int): Location + ORGANIZATION (int): Organization + EVENT (int): Event + WORK_OF_ART (int): Work of art + CONSUMER_GOOD (int): Consumer goods + OTHER (int): Other types + """ + UNKNOWN = 0 + PERSON = 1 + LOCATION = 2 + ORGANIZATION = 3 + EVENT = 4 + WORK_OF_ART = 5 + CONSUMER_GOOD = 6 + OTHER = 7 + + +class PartOfSpeech(object): + class Tag(object): + """ + The part of speech tags enum. + + Attributes: + UNKNOWN (int): Unknown + ADJ (int): Adjective + ADP (int): Adposition (preposition and postposition) + ADV (int): Adverb + CONJ (int): Conjunction + DET (int): Determiner + NOUN (int): Noun (common and proper) + NUM (int): Cardinal number + PRON (int): Pronoun + PRT (int): Particle or other function word + PUNCT (int): Punctuation + VERB (int): Verb (all tenses and modes) + X (int): Other: foreign words, typos, abbreviations + AFFIX (int): Affix + """ + UNKNOWN = 0 + ADJ = 1 + ADP = 2 + ADV = 3 + CONJ = 4 + DET = 5 + NOUN = 6 + NUM = 7 + PRON = 8 + PRT = 9 + PUNCT = 10 + VERB = 11 + X = 12 + AFFIX = 13 + + class Aspect(object): + """ + The characteristic of a verb that expresses time flow during an event. + + Attributes: + ASPECT_UNKNOWN (int): Aspect is not applicable in the analyzed language or is not predicted. + PERFECTIVE (int): Perfective + IMPERFECTIVE (int): Imperfective + PROGRESSIVE (int): Progressive + """ + ASPECT_UNKNOWN = 0 + PERFECTIVE = 1 + IMPERFECTIVE = 2 + PROGRESSIVE = 3 + + class Case(object): + """ + The grammatical function performed by a noun or pronoun in a phrase, + clause, or sentence. In some languages, other parts of speech, such as + adjective and determiner, take case inflection in agreement with the noun. + + Attributes: + CASE_UNKNOWN (int): Case is not applicable in the analyzed language or is not predicted. 
+ ACCUSATIVE (int): Accusative + ADVERBIAL (int): Adverbial + COMPLEMENTIVE (int): Complementive + DATIVE (int): Dative + GENITIVE (int): Genitive + INSTRUMENTAL (int): Instrumental + LOCATIVE (int): Locative + NOMINATIVE (int): Nominative + OBLIQUE (int): Oblique + PARTITIVE (int): Partitive + PREPOSITIONAL (int): Prepositional + REFLEXIVE_CASE (int): Reflexive + RELATIVE_CASE (int): Relative + VOCATIVE (int): Vocative + """ + CASE_UNKNOWN = 0 + ACCUSATIVE = 1 + ADVERBIAL = 2 + COMPLEMENTIVE = 3 + DATIVE = 4 + GENITIVE = 5 + INSTRUMENTAL = 6 + LOCATIVE = 7 + NOMINATIVE = 8 + OBLIQUE = 9 + PARTITIVE = 10 + PREPOSITIONAL = 11 + REFLEXIVE_CASE = 12 + RELATIVE_CASE = 13 + VOCATIVE = 14 + + class Form(object): + """ + Depending on the language, Form can be categorizing different forms of + verbs, adjectives, adverbs, etc. For example, categorizing inflected + endings of verbs and adjectives or distinguishing between short and long + forms of adjectives and participles + + Attributes: + FORM_UNKNOWN (int): Form is not applicable in the analyzed language or is not predicted. + ADNOMIAL (int): Adnomial + AUXILIARY (int): Auxiliary + COMPLEMENTIZER (int): Complementizer + FINAL_ENDING (int): Final ending + GERUND (int): Gerund + REALIS (int): Realis + IRREALIS (int): Irrealis + SHORT (int): Short form + LONG (int): Long form + ORDER (int): Order form + SPECIFIC (int): Specific form + """ + FORM_UNKNOWN = 0 + ADNOMIAL = 1 + AUXILIARY = 2 + COMPLEMENTIZER = 3 + FINAL_ENDING = 4 + GERUND = 5 + REALIS = 6 + IRREALIS = 7 + SHORT = 8 + LONG = 9 + ORDER = 10 + SPECIFIC = 11 + + class Gender(object): + """ + Gender classes of nouns reflected in the behaviour of associated words. + + Attributes: + GENDER_UNKNOWN (int): Gender is not applicable in the analyzed language or is not predicted. + FEMININE (int): Feminine + MASCULINE (int): Masculine + NEUTER (int): Neuter + """ + GENDER_UNKNOWN = 0 + FEMININE = 1 + MASCULINE = 2 + NEUTER = 3 + + class Mood(object): + """ + The grammatical feature of verbs, used for showing modality and attitude. + + Attributes: + MOOD_UNKNOWN (int): Mood is not applicable in the analyzed language or is not predicted. + CONDITIONAL_MOOD (int): Conditional + IMPERATIVE (int): Imperative + INDICATIVE (int): Indicative + INTERROGATIVE (int): Interrogative + JUSSIVE (int): Jussive + SUBJUNCTIVE (int): Subjunctive + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(object): + """ + Count distinctions. + + Attributes: + NUMBER_UNKNOWN (int): Number is not applicable in the analyzed language or is not predicted. + SINGULAR (int): Singular + PLURAL (int): Plural + DUAL (int): Dual + """ + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(object): + """ + The distinction between the speaker, second person, third person, etc. + + Attributes: + PERSON_UNKNOWN (int): Person is not applicable in the analyzed language or is not predicted. + FIRST (int): First + SECOND (int): Second + THIRD (int): Third + REFLEXIVE_PERSON (int): Reflexive + """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(object): + """ + This category shows if the token is part of a proper name. + + Attributes: + PROPER_UNKNOWN (int): Proper is not applicable in the analyzed language or is not predicted. 
+ PROPER (int): Proper + NOT_PROPER (int): Not proper + """ + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(object): + """ + Reciprocal features of a pronoun. + + Attributes: + RECIPROCITY_UNKNOWN (int): Reciprocity is not applicable in the analyzed language or is not + predicted. + RECIPROCAL (int): Reciprocal + NON_RECIPROCAL (int): Non-reciprocal + """ + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(object): + """ + Time reference. + + Attributes: + TENSE_UNKNOWN (int): Tense is not applicable in the analyzed language or is not predicted. + CONDITIONAL_TENSE (int): Conditional + FUTURE (int): Future + PAST (int): Past + PRESENT (int): Present + IMPERFECT (int): Imperfect + PLUPERFECT (int): Pluperfect + """ + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(object): + """ + The relationship between the action that a verb expresses and the + participants identified by its arguments. + + Attributes: + VOICE_UNKNOWN (int): Voice is not applicable in the analyzed language or is not predicted. + ACTIVE (int): Active + CAUSATIVE (int): Causative + PASSIVE (int): Passive + """ + VOICE_UNKNOWN = 0 + ACTIVE = 1 + CAUSATIVE = 2 + PASSIVE = 3 + + +class DependencyEdge(object): + class Label(object): + """ + The parse label enum for the token. + + Attributes: + UNKNOWN (int): Unknown + ABBREV (int): Abbreviation modifier + ACOMP (int): Adjectival complement + ADVCL (int): Adverbial clause modifier + ADVMOD (int): Adverbial modifier + AMOD (int): Adjectival modifier of an NP + APPOS (int): Appositional modifier of an NP + ATTR (int): Attribute dependent of a copular verb + AUX (int): Auxiliary (non-main) verb + AUXPASS (int): Passive auxiliary + CC (int): Coordinating conjunction + CCOMP (int): Clausal complement of a verb or adjective + CONJ (int): Conjunct + CSUBJ (int): Clausal subject + CSUBJPASS (int): Clausal passive subject + DEP (int): Dependency (unable to determine) + DET (int): Determiner + DISCOURSE (int): Discourse + DOBJ (int): Direct object + EXPL (int): Expletive + GOESWITH (int): Goes with (part of a word in a text not well edited) + IOBJ (int): Indirect object + MARK (int): Marker (word introducing a subordinate clause) + MWE (int): Multi-word expression + MWV (int): Multi-word verbal expression + NEG (int): Negation modifier + NN (int): Noun compound modifier + NPADVMOD (int): Noun phrase used as an adverbial modifier + NSUBJ (int): Nominal subject + NSUBJPASS (int): Passive nominal subject + NUM (int): Numeric modifier of a noun + NUMBER (int): Element of compound number + P (int): Punctuation mark + PARATAXIS (int): Parataxis relation + PARTMOD (int): Participial modifier + PCOMP (int): The complement of a preposition is a clause + POBJ (int): Object of a preposition + POSS (int): Possession modifier + POSTNEG (int): Postverbal negative particle + PRECOMP (int): Predicate complement + PRECONJ (int): Preconjunt + PREDET (int): Predeterminer + PREF (int): Prefix + PREP (int): Prepositional modifier + PRONL (int): The relationship between a verb and verbal morpheme + PRT (int): Particle + PS (int): Associative or possessive marker + QUANTMOD (int): Quantifier phrase modifier + RCMOD (int): Relative clause modifier + RCMODREL (int): Complementizer in relative clause + RDROP (int): Ellipsis without a preceding predicate + REF (int): Referent + REMNANT (int): Remnant + REPARANDUM (int): Reparandum + ROOT (int): Root + SNUM (int): Suffix specifying a 
unit of number + SUFF (int): Suffix + TMOD (int): Temporal modifier + TOPIC (int): Topic marker + VMOD (int): Clause headed by an infinite form of the verb that modifies a noun + VOCATIVE (int): Vocative + XCOMP (int): Open clausal complement + SUFFIX (int): Name suffix + TITLE (int): Name title + ADVPHMOD (int): Adverbial phrase modifier + AUXCAUS (int): Causative auxiliary + AUXVV (int): Helper auxiliary + DTMOD (int): Rentaishi (Prenominal modifier) + FOREIGN (int): Foreign words + KW (int): Keyword + LIST (int): List for chains of comparable items + NOMC (int): Nominalized clause + NOMCSUBJ (int): Nominalized clausal subject + NOMCSUBJPASS (int): Nominalized clausal passive + NUMC (int): Compound of numeric modifier + COP (int): Copula + DISLOCATED (int): Dislocated relation (for fronted/topicalized elements) + """ + UNKNOWN = 0 + ABBREV = 1 + ACOMP = 2 + ADVCL = 3 + ADVMOD = 4 + AMOD = 5 + APPOS = 6 + ATTR = 7 + AUX = 8 + AUXPASS = 9 + CC = 10 + CCOMP = 11 + CONJ = 12 + CSUBJ = 13 + CSUBJPASS = 14 + DEP = 15 + DET = 16 + DISCOURSE = 17 + DOBJ = 18 + EXPL = 19 + GOESWITH = 20 + IOBJ = 21 + MARK = 22 + MWE = 23 + MWV = 24 + NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + + +class EntityMention(object): + class Type(object): + """ + The supported types of mentions. + + Attributes: + TYPE_UNKNOWN (int): Unknown + PROPER (int): Proper name + COMMON (int): Common noun (or noun compound) + """ + TYPE_UNKNOWN = 0 + PROPER = 1 + COMMON = 2 diff --git a/language/google/cloud/gapic/language/v1/language_service_client.py b/language/google/cloud/gapic/language/v1/language_service_client.py new file mode 100644 index 000000000000..fb55b9568b67 --- /dev/null +++ b/language/google/cloud/gapic/language/v1/language_service_client.py @@ -0,0 +1,290 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/cloud/language/v1/language_service.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. 
+"""Accesses the google.cloud.language.v1 LanguageService API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.gapic.language.v1 import enums +from google.cloud.proto.language.v1 import language_service_pb2 + + +class LanguageServiceClient(object): + """ + Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + SERVICE_ADDRESS = 'language.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A LanguageServiceClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. 
+ metrics_headers['gapic'] = pkg_resources.get_distribution(
+ 'google-cloud-language', ).version
+
+ # Load the configuration defaults.
+ default_client_config = json.loads(
+ pkg_resources.resource_string(
+ __name__, 'language_service_client_config.json').decode())
+ defaults = api_callable.construct_settings(
+ 'google.cloud.language.v1.LanguageService',
+ default_client_config,
+ client_config,
+ config.STATUS_CODE_NAMES,
+ metrics_headers=metrics_headers, )
+ self.language_service_stub = config.create_stub(
+ language_service_pb2.LanguageServiceStub,
+ channel=channel,
+ service_path=service_path,
+ service_port=port,
+ credentials=credentials,
+ scopes=scopes,
+ ssl_credentials=ssl_credentials)
+
+ self._analyze_sentiment = api_callable.create_api_call(
+ self.language_service_stub.AnalyzeSentiment,
+ settings=defaults['analyze_sentiment'])
+ self._analyze_entities = api_callable.create_api_call(
+ self.language_service_stub.AnalyzeEntities,
+ settings=defaults['analyze_entities'])
+ self._analyze_syntax = api_callable.create_api_call(
+ self.language_service_stub.AnalyzeSyntax,
+ settings=defaults['analyze_syntax'])
+ self._annotate_text = api_callable.create_api_call(
+ self.language_service_stub.AnnotateText,
+ settings=defaults['annotate_text'])
+
+ # Service calls
+ def analyze_sentiment(self, document, encoding_type=None, options=None):
+ """
+ Analyzes the sentiment of the provided text.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1 import language_service_client
+ >>> from google.cloud.proto.language.v1 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> response = client.analyze_sentiment(document)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate sentence offsets.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeSentimentResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
+ request = language_service_pb2.AnalyzeSentimentRequest(
+ document=document, encoding_type=encoding_type)
+ return self._analyze_sentiment(request, options)
+
+ def analyze_entities(self, document, encoding_type, options=None):
+ """
+ Finds named entities (currently proper names and common nouns) in the text
+ along with entity types, salience, mentions for each entity, and
+ other properties.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1 import language_service_client
+ >>> from google.cloud.gapic.language.v1 import enums
+ >>> from google.cloud.proto.language.v1 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> encoding_type = enums.EncodingType.NONE
+ >>> response = client.analyze_entities(document, encoding_type)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeEntitiesResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
+ request = language_service_pb2.AnalyzeEntitiesRequest(
+ document=document, encoding_type=encoding_type)
+ return self._analyze_entities(request, options)
+
+ def analyze_syntax(self, document, encoding_type, options=None):
+ """
+ Analyzes the syntax of the text and provides sentence boundaries and
+ tokenization along with part of speech tags, dependency trees, and other
+ properties.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1 import language_service_client
+ >>> from google.cloud.gapic.language.v1 import enums
+ >>> from google.cloud.proto.language.v1 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> encoding_type = enums.EncodingType.NONE
+ >>> response = client.analyze_syntax(document, encoding_type)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1.language_service_pb2.AnalyzeSyntaxResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
+ request = language_service_pb2.AnalyzeSyntaxRequest(
+ document=document, encoding_type=encoding_type)
+ return self._analyze_syntax(request, options)
+
+ def annotate_text(self, document, features, encoding_type, options=None):
+ """
+ A convenience method that provides all the features that analyzeSentiment,
+ analyzeEntities, and analyzeSyntax provide in one call.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1 import language_service_client
+ >>> from google.cloud.gapic.language.v1 import enums
+ >>> from google.cloud.proto.language.v1 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> features = language_service_pb2.AnnotateTextRequest.Features()
+ >>> encoding_type = enums.EncodingType.NONE
+ >>> response = client.annotate_text(document, features, encoding_type)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1.language_service_pb2.Document`): Input document.
+ features (:class:`google.cloud.proto.language.v1.language_service_pb2.AnnotateTextRequest.Features`): The enabled features.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1.language_service_pb2.AnnotateTextResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
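+        # (Illustrative note: ``features`` selects which analyses run. Based
+        # on the Features message in the v1 proto, a caller might build it as
+        #     features = language_service_pb2.AnnotateTextRequest.Features(
+        #         extract_syntax=True,
+        #         extract_entities=True,
+        #         extract_document_sentiment=True)
+        # The field names come from the proto; fields left False simply skip
+        # that analysis.)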
+ request = language_service_pb2.AnnotateTextRequest( + document=document, features=features, encoding_type=encoding_type) + return self._annotate_text(request, options) diff --git a/language/google/cloud/gapic/language/v1/language_service_client_config.json b/language/google/cloud/gapic/language/v1/language_service_client_config.json new file mode 100644 index 000000000000..202d5b0d427b --- /dev/null +++ b/language/google/cloud/gapic/language/v1/language_service_client_config.json @@ -0,0 +1,46 @@ +{ + "interfaces": { + "google.cloud.language.v1.LanguageService": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "AnalyzeSentiment": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnalyzeEntities": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnalyzeSyntax": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "AnnotateText": { + "timeout_millis": 30000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/language/google/cloud/gapic/language/v1beta2/__init__.py b/language/google/cloud/gapic/language/v1beta2/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/language/google/cloud/gapic/language/v1beta2/enums.py b/language/google/cloud/gapic/language/v1beta2/enums.py new file mode 100644 index 000000000000..2b53e4d913bb --- /dev/null +++ b/language/google/cloud/gapic/language/v1beta2/enums.py @@ -0,0 +1,516 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class EncodingType(object): + """ + Represents the text encoding that the caller uses to process the output. + Providing an ``EncodingType`` is recommended because the API provides the + beginning offsets for various outputs, such as tokens and mentions, and + languages that natively use different text encodings may access offsets + differently. + + Attributes: + NONE (int): If ``EncodingType`` is not specified, encoding-dependent information (such as + ``begin_offset``) will be set at ``-1``. + UTF8 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based + on the UTF-8 encoding of the input. C++ and Go are examples of languages + that use this encoding natively. + UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based + on the UTF-16 encoding of the input. Java and Javascript are examples of + languages that use this encoding natively. 
+ UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based
+ on the UTF-32 encoding of the input. Python is an example of a language
+ that uses this encoding natively.
+ """
+ NONE = 0
+ UTF8 = 1
+ UTF16 = 2
+ UTF32 = 3
+
+
+class Document(object):
+ class Type(object):
+ """
+ The document types enum.
+
+ Attributes:
+ TYPE_UNSPECIFIED (int): The content type is not specified.
+ PLAIN_TEXT (int): Plain text
+ HTML (int): HTML
+ """
+ TYPE_UNSPECIFIED = 0
+ PLAIN_TEXT = 1
+ HTML = 2
+
+
+class Entity(object):
+ class Type(object):
+ """
+ The type of the entity.
+
+ Attributes:
+ UNKNOWN (int): Unknown
+ PERSON (int): Person
+ LOCATION (int): Location
+ ORGANIZATION (int): Organization
+ EVENT (int): Event
+ WORK_OF_ART (int): Work of art
+ CONSUMER_GOOD (int): Consumer goods
+ OTHER (int): Other types
+ """
+ UNKNOWN = 0
+ PERSON = 1
+ LOCATION = 2
+ ORGANIZATION = 3
+ EVENT = 4
+ WORK_OF_ART = 5
+ CONSUMER_GOOD = 6
+ OTHER = 7
+
+
+class PartOfSpeech(object):
+ class Tag(object):
+ """
+ The part of speech tags enum.
+
+ Attributes:
+ UNKNOWN (int): Unknown
+ ADJ (int): Adjective
+ ADP (int): Adposition (preposition and postposition)
+ ADV (int): Adverb
+ CONJ (int): Conjunction
+ DET (int): Determiner
+ NOUN (int): Noun (common and proper)
+ NUM (int): Cardinal number
+ PRON (int): Pronoun
+ PRT (int): Particle or other function word
+ PUNCT (int): Punctuation
+ VERB (int): Verb (all tenses and modes)
+ X (int): Other: foreign words, typos, abbreviations
+ AFFIX (int): Affix
+ """
+ UNKNOWN = 0
+ ADJ = 1
+ ADP = 2
+ ADV = 3
+ CONJ = 4
+ DET = 5
+ NOUN = 6
+ NUM = 7
+ PRON = 8
+ PRT = 9
+ PUNCT = 10
+ VERB = 11
+ X = 12
+ AFFIX = 13
+
+ class Aspect(object):
+ """
+ The characteristic of a verb that expresses time flow during an event.
+
+ Attributes:
+ ASPECT_UNKNOWN (int): Aspect is not applicable in the analyzed language or is not predicted.
+ PERFECTIVE (int): Perfective
+ IMPERFECTIVE (int): Imperfective
+ PROGRESSIVE (int): Progressive
+ """
+ ASPECT_UNKNOWN = 0
+ PERFECTIVE = 1
+ IMPERFECTIVE = 2
+ PROGRESSIVE = 3
+
+ class Case(object):
+ """
+ The grammatical function performed by a noun or pronoun in a phrase,
+ clause, or sentence. In some languages, other parts of speech, such as
+ adjective and determiner, take case inflection in agreement with the noun.
+
+ Attributes:
+ CASE_UNKNOWN (int): Case is not applicable in the analyzed language or is not predicted.
+ ACCUSATIVE (int): Accusative
+ ADVERBIAL (int): Adverbial
+ COMPLEMENTIVE (int): Complementive
+ DATIVE (int): Dative
+ GENITIVE (int): Genitive
+ INSTRUMENTAL (int): Instrumental
+ LOCATIVE (int): Locative
+ NOMINATIVE (int): Nominative
+ OBLIQUE (int): Oblique
+ PARTITIVE (int): Partitive
+ PREPOSITIONAL (int): Prepositional
+ REFLEXIVE_CASE (int): Reflexive
+ RELATIVE_CASE (int): Relative
+ VOCATIVE (int): Vocative
+ """
+ CASE_UNKNOWN = 0
+ ACCUSATIVE = 1
+ ADVERBIAL = 2
+ COMPLEMENTIVE = 3
+ DATIVE = 4
+ GENITIVE = 5
+ INSTRUMENTAL = 6
+ LOCATIVE = 7
+ NOMINATIVE = 8
+ OBLIQUE = 9
+ PARTITIVE = 10
+ PREPOSITIONAL = 11
+ REFLEXIVE_CASE = 12
+ RELATIVE_CASE = 13
+ VOCATIVE = 14
+
+ class Form(object):
+ """
+ Depending on the language, Form can categorize different forms of
+ verbs, adjectives, adverbs, etc. For example, it can categorize inflected
+ endings of verbs and adjectives, or distinguish between short and long
+ forms of adjectives and participles.
+
+ Attributes:
+ FORM_UNKNOWN (int): Form is not applicable in the analyzed language or is not predicted.
+ ADNOMIAL (int): Adnomial + AUXILIARY (int): Auxiliary + COMPLEMENTIZER (int): Complementizer + FINAL_ENDING (int): Final ending + GERUND (int): Gerund + REALIS (int): Realis + IRREALIS (int): Irrealis + SHORT (int): Short form + LONG (int): Long form + ORDER (int): Order form + SPECIFIC (int): Specific form + """ + FORM_UNKNOWN = 0 + ADNOMIAL = 1 + AUXILIARY = 2 + COMPLEMENTIZER = 3 + FINAL_ENDING = 4 + GERUND = 5 + REALIS = 6 + IRREALIS = 7 + SHORT = 8 + LONG = 9 + ORDER = 10 + SPECIFIC = 11 + + class Gender(object): + """ + Gender classes of nouns reflected in the behaviour of associated words. + + Attributes: + GENDER_UNKNOWN (int): Gender is not applicable in the analyzed language or is not predicted. + FEMININE (int): Feminine + MASCULINE (int): Masculine + NEUTER (int): Neuter + """ + GENDER_UNKNOWN = 0 + FEMININE = 1 + MASCULINE = 2 + NEUTER = 3 + + class Mood(object): + """ + The grammatical feature of verbs, used for showing modality and attitude. + + Attributes: + MOOD_UNKNOWN (int): Mood is not applicable in the analyzed language or is not predicted. + CONDITIONAL_MOOD (int): Conditional + IMPERATIVE (int): Imperative + INDICATIVE (int): Indicative + INTERROGATIVE (int): Interrogative + JUSSIVE (int): Jussive + SUBJUNCTIVE (int): Subjunctive + """ + MOOD_UNKNOWN = 0 + CONDITIONAL_MOOD = 1 + IMPERATIVE = 2 + INDICATIVE = 3 + INTERROGATIVE = 4 + JUSSIVE = 5 + SUBJUNCTIVE = 6 + + class Number(object): + """ + Count distinctions. + + Attributes: + NUMBER_UNKNOWN (int): Number is not applicable in the analyzed language or is not predicted. + SINGULAR (int): Singular + PLURAL (int): Plural + DUAL (int): Dual + """ + NUMBER_UNKNOWN = 0 + SINGULAR = 1 + PLURAL = 2 + DUAL = 3 + + class Person(object): + """ + The distinction between the speaker, second person, third person, etc. + + Attributes: + PERSON_UNKNOWN (int): Person is not applicable in the analyzed language or is not predicted. + FIRST (int): First + SECOND (int): Second + THIRD (int): Third + REFLEXIVE_PERSON (int): Reflexive + """ + PERSON_UNKNOWN = 0 + FIRST = 1 + SECOND = 2 + THIRD = 3 + REFLEXIVE_PERSON = 4 + + class Proper(object): + """ + This category shows if the token is part of a proper name. + + Attributes: + PROPER_UNKNOWN (int): Proper is not applicable in the analyzed language or is not predicted. + PROPER (int): Proper + NOT_PROPER (int): Not proper + """ + PROPER_UNKNOWN = 0 + PROPER = 1 + NOT_PROPER = 2 + + class Reciprocity(object): + """ + Reciprocal features of a pronoun. + + Attributes: + RECIPROCITY_UNKNOWN (int): Reciprocity is not applicable in the analyzed language or is not + predicted. + RECIPROCAL (int): Reciprocal + NON_RECIPROCAL (int): Non-reciprocal + """ + RECIPROCITY_UNKNOWN = 0 + RECIPROCAL = 1 + NON_RECIPROCAL = 2 + + class Tense(object): + """ + Time reference. + + Attributes: + TENSE_UNKNOWN (int): Tense is not applicable in the analyzed language or is not predicted. + CONDITIONAL_TENSE (int): Conditional + FUTURE (int): Future + PAST (int): Past + PRESENT (int): Present + IMPERFECT (int): Imperfect + PLUPERFECT (int): Pluperfect + """ + TENSE_UNKNOWN = 0 + CONDITIONAL_TENSE = 1 + FUTURE = 2 + PAST = 3 + PRESENT = 4 + IMPERFECT = 5 + PLUPERFECT = 6 + + class Voice(object): + """ + The relationship between the action that a verb expresses and the + participants identified by its arguments. + + Attributes: + VOICE_UNKNOWN (int): Voice is not applicable in the analyzed language or is not predicted. 
+ ACTIVE (int): Active
+ CAUSATIVE (int): Causative
+ PASSIVE (int): Passive
+ """
+ VOICE_UNKNOWN = 0
+ ACTIVE = 1
+ CAUSATIVE = 2
+ PASSIVE = 3
+
+
+class DependencyEdge(object):
+ class Label(object):
+ """
+ The parse label enum for the token.
+
+ Attributes:
+ UNKNOWN (int): Unknown
+ ABBREV (int): Abbreviation modifier
+ ACOMP (int): Adjectival complement
+ ADVCL (int): Adverbial clause modifier
+ ADVMOD (int): Adverbial modifier
+ AMOD (int): Adjectival modifier of an NP
+ APPOS (int): Appositional modifier of an NP
+ ATTR (int): Attribute dependent of a copular verb
+ AUX (int): Auxiliary (non-main) verb
+ AUXPASS (int): Passive auxiliary
+ CC (int): Coordinating conjunction
+ CCOMP (int): Clausal complement of a verb or adjective
+ CONJ (int): Conjunct
+ CSUBJ (int): Clausal subject
+ CSUBJPASS (int): Clausal passive subject
+ DEP (int): Dependency (unable to determine)
+ DET (int): Determiner
+ DISCOURSE (int): Discourse
+ DOBJ (int): Direct object
+ EXPL (int): Expletive
+ GOESWITH (int): Goes with (part of a word in a text not well edited)
+ IOBJ (int): Indirect object
+ MARK (int): Marker (word introducing a subordinate clause)
+ MWE (int): Multi-word expression
+ MWV (int): Multi-word verbal expression
+ NEG (int): Negation modifier
+ NN (int): Noun compound modifier
+ NPADVMOD (int): Noun phrase used as an adverbial modifier
+ NSUBJ (int): Nominal subject
+ NSUBJPASS (int): Passive nominal subject
+ NUM (int): Numeric modifier of a noun
+ NUMBER (int): Element of compound number
+ P (int): Punctuation mark
+ PARATAXIS (int): Parataxis relation
+ PARTMOD (int): Participial modifier
+ PCOMP (int): The complement of a preposition is a clause
+ POBJ (int): Object of a preposition
+ POSS (int): Possession modifier
+ POSTNEG (int): Postverbal negative particle
+ PRECOMP (int): Predicate complement
+ PRECONJ (int): Preconjunct
+ PREDET (int): Predeterminer
+ PREF (int): Prefix
+ PREP (int): Prepositional modifier
+ PRONL (int): The relationship between a verb and verbal morpheme
+ PRT (int): Particle
+ PS (int): Associative or possessive marker
+ QUANTMOD (int): Quantifier phrase modifier
+ RCMOD (int): Relative clause modifier
+ RCMODREL (int): Complementizer in relative clause
+ RDROP (int): Ellipsis without a preceding predicate
+ REF (int): Referent
+ REMNANT (int): Remnant
+ REPARANDUM (int): Reparandum
+ ROOT (int): Root
+ SNUM (int): Suffix specifying a unit of number
+ SUFF (int): Suffix
+ TMOD (int): Temporal modifier
+ TOPIC (int): Topic marker
+ VMOD (int): Clause headed by an infinite form of the verb that modifies a noun
+ VOCATIVE (int): Vocative
+ XCOMP (int): Open clausal complement
+ SUFFIX (int): Name suffix
+ TITLE (int): Name title
+ ADVPHMOD (int): Adverbial phrase modifier
+ AUXCAUS (int): Causative auxiliary
+ AUXVV (int): Helper auxiliary
+ DTMOD (int): Rentaishi (Prenominal modifier)
+ FOREIGN (int): Foreign words
+ KW (int): Keyword
+ LIST (int): List for chains of comparable items
+ NOMC (int): Nominalized clause
+ NOMCSUBJ (int): Nominalized clausal subject
+ NOMCSUBJPASS (int): Nominalized clausal passive
+ NUMC (int): Compound of numeric modifier
+ COP (int): Copula
+ DISLOCATED (int): Dislocated relation (for fronted/topicalized elements)
+ """
+ UNKNOWN = 0
+ ABBREV = 1
+ ACOMP = 2
+ ADVCL = 3
+ ADVMOD = 4
+ AMOD = 5
+ APPOS = 6
+ ATTR = 7
+ AUX = 8
+ AUXPASS = 9
+ CC = 10
+ CCOMP = 11
+ CONJ = 12
+ CSUBJ = 13
+ CSUBJPASS = 14
+ DEP = 15
+ DET = 16
+ DISCOURSE = 17
+ DOBJ = 18
+ EXPL = 19
+ GOESWITH = 20
+ IOBJ = 21
+ MARK = 22
+ MWE = 23
+ MWV = 24
+
NEG = 25 + NN = 26 + NPADVMOD = 27 + NSUBJ = 28 + NSUBJPASS = 29 + NUM = 30 + NUMBER = 31 + P = 32 + PARATAXIS = 33 + PARTMOD = 34 + PCOMP = 35 + POBJ = 36 + POSS = 37 + POSTNEG = 38 + PRECOMP = 39 + PRECONJ = 40 + PREDET = 41 + PREF = 42 + PREP = 43 + PRONL = 44 + PRT = 45 + PS = 46 + QUANTMOD = 47 + RCMOD = 48 + RCMODREL = 49 + RDROP = 50 + REF = 51 + REMNANT = 52 + REPARANDUM = 53 + ROOT = 54 + SNUM = 55 + SUFF = 56 + TMOD = 57 + TOPIC = 58 + VMOD = 59 + VOCATIVE = 60 + XCOMP = 61 + SUFFIX = 62 + TITLE = 63 + ADVPHMOD = 64 + AUXCAUS = 65 + AUXVV = 66 + DTMOD = 67 + FOREIGN = 68 + KW = 69 + LIST = 70 + NOMC = 71 + NOMCSUBJ = 72 + NOMCSUBJPASS = 73 + NUMC = 74 + COP = 75 + DISLOCATED = 76 + + +class EntityMention(object): + class Type(object): + """ + The supported types of mentions. + + Attributes: + TYPE_UNKNOWN (int): Unknown + PROPER (int): Proper name + COMMON (int): Common noun (or noun compound) + """ + TYPE_UNKNOWN = 0 + PROPER = 1 + COMMON = 2 diff --git a/language/google/cloud/gapic/language/v1beta2/language_service_client.py b/language/google/cloud/gapic/language/v1beta2/language_service_client.py new file mode 100644 index 000000000000..a990d2a9758a --- /dev/null +++ b/language/google/cloud/gapic/language/v1beta2/language_service_client.py @@ -0,0 +1,326 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/cloud/language/v1beta2/language_service.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.cloud.language.v1beta2 LanguageService API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.gapic.language.v1beta2 import enums +from google.cloud.proto.language.v1beta2 import language_service_pb2 + + +class LanguageServiceClient(object): + """ + Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + SERVICE_ADDRESS = 'language.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. 
+ + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A LanguageServiceClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-language', ).version + + # Load the configuration defaults. 
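+        # (Note: the defaults loaded below come from
+        # language_service_client_config.json in this package, e.g. 30000 ms
+        # per-call timeouts with retries on DEADLINE_EXCEEDED and UNAVAILABLE;
+        # entries in a user-supplied ``client_config`` override them when
+        # merged by ``construct_settings``.)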
+ default_client_config = json.loads(
+ pkg_resources.resource_string(
+ __name__, 'language_service_client_config.json').decode())
+ defaults = api_callable.construct_settings(
+ 'google.cloud.language.v1beta2.LanguageService',
+ default_client_config,
+ client_config,
+ config.STATUS_CODE_NAMES,
+ metrics_headers=metrics_headers, )
+ self.language_service_stub = config.create_stub(
+ language_service_pb2.LanguageServiceStub,
+ channel=channel,
+ service_path=service_path,
+ service_port=port,
+ credentials=credentials,
+ scopes=scopes,
+ ssl_credentials=ssl_credentials)
+
+ self._analyze_sentiment = api_callable.create_api_call(
+ self.language_service_stub.AnalyzeSentiment,
+ settings=defaults['analyze_sentiment'])
+ self._analyze_entities = api_callable.create_api_call(
+ self.language_service_stub.AnalyzeEntities,
+ settings=defaults['analyze_entities'])
+ self._analyze_entity_sentiment = api_callable.create_api_call(
+ self.language_service_stub.AnalyzeEntitySentiment,
+ settings=defaults['analyze_entity_sentiment'])
+ self._analyze_syntax = api_callable.create_api_call(
+ self.language_service_stub.AnalyzeSyntax,
+ settings=defaults['analyze_syntax'])
+ self._annotate_text = api_callable.create_api_call(
+ self.language_service_stub.AnnotateText,
+ settings=defaults['annotate_text'])
+
+ # Service calls
+ def analyze_sentiment(self, document, encoding_type=None, options=None):
+ """
+ Analyzes the sentiment of the provided text.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+ >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> response = client.analyze_sentiment(document)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate sentence offsets for the
+ sentence sentiment.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeSentimentResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
+ request = language_service_pb2.AnalyzeSentimentRequest(
+ document=document, encoding_type=encoding_type)
+ return self._analyze_sentiment(request, options)
+
+ def analyze_entities(self, document, encoding_type, options=None):
+ """
+ Finds named entities (currently proper names and common nouns) in the text
+ along with entity types, salience, mentions for each entity, and
+ other properties.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+ >>> from google.cloud.gapic.language.v1beta2 import enums
+ >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> encoding_type = enums.EncodingType.NONE
+ >>> response = client.analyze_entities(document, encoding_type)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeEntitiesResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
+ request = language_service_pb2.AnalyzeEntitiesRequest(
+ document=document, encoding_type=encoding_type)
+ return self._analyze_entities(request, options)
+
+ def analyze_entity_sentiment(self, document, encoding_type, options=None):
+ """
+ Finds entities, similar to ``AnalyzeEntities``, in the text and analyzes
+ sentiment associated with each entity and its mentions.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+ >>> from google.cloud.gapic.language.v1beta2 import enums
+ >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> encoding_type = enums.EncodingType.NONE
+ >>> response = client.analyze_entity_sentiment(document, encoding_type)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeEntitySentimentResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
+ request = language_service_pb2.AnalyzeEntitySentimentRequest(
+ document=document, encoding_type=encoding_type)
+ return self._analyze_entity_sentiment(request, options)
+
+ def analyze_syntax(self, document, encoding_type, options=None):
+ """
+ Analyzes the syntax of the text and provides sentence boundaries and
+ tokenization along with part of speech tags, dependency trees, and other
+ properties.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+ >>> from google.cloud.gapic.language.v1beta2 import enums
+ >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> encoding_type = enums.EncodingType.NONE
+ >>> response = client.analyze_syntax(document, encoding_type)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnalyzeSyntaxResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
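+        # (Illustrative note: ``encoding_type``, e.g.
+        # ``enums.EncodingType.UTF8``, determines how ``begin_offset`` values
+        # in the returned ``TextSpan`` messages are computed; see the
+        # ``EncodingType`` docstring in enums.py.)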
+ request = language_service_pb2.AnalyzeSyntaxRequest(
+ document=document, encoding_type=encoding_type)
+ return self._analyze_syntax(request, options)
+
+ def annotate_text(self, document, features, encoding_type, options=None):
+ """
+ A convenience method that provides all syntax, sentiment, and entity
+ features in one call.
+
+ Example:
+ >>> from google.cloud.gapic.language.v1beta2 import language_service_client
+ >>> from google.cloud.gapic.language.v1beta2 import enums
+ >>> from google.cloud.proto.language.v1beta2 import language_service_pb2
+ >>> client = language_service_client.LanguageServiceClient()
+ >>> document = language_service_pb2.Document()
+ >>> features = language_service_pb2.AnnotateTextRequest.Features()
+ >>> encoding_type = enums.EncodingType.NONE
+ >>> response = client.annotate_text(document, features, encoding_type)
+
+ Args:
+ document (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.Document`): Input document.
+ features (:class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnnotateTextRequest.Features`): The enabled features.
+ encoding_type (enum :class:`google.cloud.gapic.language.v1beta2.enums.EncodingType`): The encoding type used by the API to calculate offsets.
+ options (:class:`google.gax.CallOptions`): Overrides the default
+ settings for this call, e.g., timeout, retries, etc.
+
+ Returns:
+ A :class:`google.cloud.proto.language.v1beta2.language_service_pb2.AnnotateTextResponse` instance.
+
+ Raises:
+ :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+ :exc:`ValueError` if the parameters are invalid.
+ """
+ # Create the request object.
+ request = language_service_pb2.AnnotateTextRequest(
+ document=document, features=features, encoding_type=encoding_type)
+ return self._annotate_text(request, options)
diff --git a/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json b/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json
new file mode 100644
index 000000000000..8018f8a7bbf5
--- /dev/null
+++ b/language/google/cloud/gapic/language/v1beta2/language_service_client_config.json
@@ -0,0 +1,51 @@
+{
+ "interfaces": {
+ "google.cloud.language.v1beta2.LanguageService": {
+ "retry_codes": {
+ "idempotent": [
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE"
+ ],
+ "non_idempotent": []
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000
+ }
+ },
+ "methods": {
+ "AnalyzeSentiment": {
+ "timeout_millis": 30000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ },
+ "AnalyzeEntities": {
+ "timeout_millis": 30000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ },
+ "AnalyzeEntitySentiment": {
+ "timeout_millis": 30000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ },
+ "AnalyzeSyntax": {
+ "timeout_millis": 30000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ },
+ "AnnotateText": {
+ "timeout_millis": 30000,
+ "retry_codes_name": "idempotent",
+ "retry_params_name": "default"
+ }
+ }
+ }
+ }
+}
diff --git a/language/google/cloud/language/__init__.py b/language/google/cloud/language/__init__.py
index 8cc584b17cb8..8bba28ead739 100644
--- a/language/google/cloud/language/__init__.py
+++ b/language/google/cloud/language/__init__.py
@@ -12,14 +12,49 @@ # See the
License for the specific language governing permissions and # limitations under the License. +# ----------------------------------------------------------------------------- +# TRANSITION CODE +# ----------------------------------------------------------------------------- +# The old Language manual layer is now deprecated, but to allow +# users the time to move from the manual layer to the mostly auto-generated +# layer, they are both living side by side for a few months. +# +# Instantiating the old manual layer (`google.cloud.language.Client`) will +# issue a DeprecationWarning. +# +# When it comes time to remove the old layer, everything in this directory +# should go away EXCEPT __init__.py (which can be renamed to language.py and +# put one directory above). +# +# Additionally, the import and export of `Client`, `Document`, and `Encoding` +# should be removed from this file (along with this note), and the rest should +# be left intact. +# ----------------------------------------------------------------------------- + """Client library for Google Cloud Natural Language API.""" +from __future__ import absolute_import from pkg_resources import get_distribution __version__ = get_distribution('google-cloud-language').version +from google.cloud.language_v1 import * # noqa + from google.cloud.language.client import Client from google.cloud.language.document import Document from google.cloud.language.document import Encoding -__all__ = ['Client', 'Document', 'Encoding', '__version__'] +__all__ = ( + # Common + '__version__', + + # Manual Layer + 'Client', + 'Document', + 'Encoding', + + # Auto-gen + 'enums', + 'LanguageServiceClient', + 'types', +) diff --git a/language/google/cloud/language/client.py b/language/google/cloud/language/client.py index da6ea90c156b..58066443c844 100644 --- a/language/google/cloud/language/client.py +++ b/language/google/cloud/language/client.py @@ -52,6 +52,16 @@ class Client(client_module.Client): } def __init__(self, credentials=None, api_version='v1', _http=None): + + # Add a deprecation warning for this class. + warnings.warn( + 'This client class and objects that derive from it have been ' + 'deprecated. Use `google.cloud.language.LanguageServiceClient` ' + '(provided by this package) instead. This client will be removed ' + 'in a future release.', + DeprecationWarning, + ) + super(Client, self).__init__( credentials=credentials, _http=_http) ConnectionClass = self._CONNECTION_CLASSES[api_version] diff --git a/language/google/cloud/language_v1/__init__.py b/language/google/cloud/language_v1/__init__.py new file mode 100644 index 000000000000..a5666eadb5c7 --- /dev/null +++ b/language/google/cloud/language_v1/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
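+
+# Typical end-to-end usage of this versioned namespace (illustrative; the
+# message and enum names follow the v1 protos added elsewhere in this patch):
+#
+#     from google.cloud import language_v1
+#     client = language_v1.LanguageServiceClient()
+#     document = language_v1.types.Document(
+#         content='Hello, world!',
+#         type=language_v1.enums.Document.Type.PLAIN_TEXT)
+#     sentiment = client.analyze_sentiment(document).document_sentiment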
+ +from __future__ import absolute_import + +from google.cloud.gapic.language.v1 import language_service_client as lsc +from google.cloud.gapic.language.v1 import enums + +from google.cloud.language_v1 import types + + +LanguageServiceClient = lsc.LanguageServiceClient + + +__all__ = ( + 'enums', + 'LanguageServiceClient', + 'types', +) diff --git a/language/google/cloud/language_v1/types.py b/language/google/cloud/language_v1/types.py new file mode 100644 index 000000000000..6223f6846e09 --- /dev/null +++ b/language/google/cloud/language_v1/types.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.cloud.proto.language.v1 import language_service_pb2 + +from google.gax.utils.messages import get_messages + + +names = [] +for name, message in get_messages(language_service_pb2).items(): + message.__module__ = 'google.cloud.language_v1.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/language/google/cloud/language_v1beta2/__init__.py b/language/google/cloud/language_v1beta2/__init__.py new file mode 100644 index 000000000000..e0a3e4cc287a --- /dev/null +++ b/language/google/cloud/language_v1beta2/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.gapic.language.v1beta2 import language_service_client as lsc +from google.cloud.gapic.language.v1beta2 import enums + +from google.cloud.language_v1beta2 import types + + +LanguageServiceClient = lsc.LanguageServiceClient + + +__all__ = ( + 'enums', + 'LanguageServiceClient', + 'types', +) diff --git a/language/google/cloud/language_v1beta2/types.py b/language/google/cloud/language_v1beta2/types.py new file mode 100644 index 000000000000..557d05aeb001 --- /dev/null +++ b/language/google/cloud/language_v1beta2/types.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.cloud.proto.language.v1beta2 import language_service_pb2 + +from google.gax.utils.messages import get_messages + + +names = [] +for name, message in get_messages(language_service_pb2).items(): + message.__module__ = 'google.cloud.language_v1beta2.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/language/google/cloud/proto/__init__.py b/language/google/cloud/proto/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/language/google/cloud/proto/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/proto/language/__init__.py b/language/google/cloud/proto/language/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/language/google/cloud/proto/language/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/language/google/cloud/proto/language/v1/__init__.py b/language/google/cloud/proto/language/v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/language/google/cloud/proto/language/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/language/google/cloud/proto/language/v1/language_service_pb2.py b/language/google/cloud/proto/language/v1/language_service_pb2.py new file mode 100644 index 000000000000..98d59f56272c --- /dev/null +++ b/language/google/cloud/proto/language/v1/language_service_pb2.py @@ -0,0 +1,2647 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/proto/language/v1/language_service.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/language/v1/language_service.proto', + package='google.cloud.language.v1', + syntax='proto3', + serialized_pb=_b('\n5google/cloud/proto/language/v1/language_service.proto\x12\x18google.cloud.language.v1\x1a\x1cgoogle/api/annotations.proto\"\xc3\x01\n\x08\x44ocument\x12\x35\n\x04type\x18\x01 \x01(\x0e\x32\'.google.cloud.language.v1.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t\"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source\"t\n\x08Sentence\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12\x36\n\tsentiment\x18\x02 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\"\x86\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x04type\x18\x02 \x01(\x0e\x32%.google.cloud.language.v1.Entity.Type\x12@\n\x08metadata\x18\x03 \x03(\x0b\x32..google.cloud.language.v1.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12\x39\n\x08mentions\x18\x05 \x03(\x0b\x32\'.google.cloud.language.v1.EntityMention\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\"\xcb\x01\n\x05Token\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12>\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32&.google.cloud.language.v1.PartOfSpeech\x12\x41\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t\"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02\"\xa3\x10\n\x0cPartOfSpeech\x12\x37\n\x03tag\x18\x01 \x01(\x0e\x32*.google.cloud.language.v1.PartOfSpeech.Tag\x12=\n\x06\x61spect\x18\x02 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Aspect\x12\x39\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Case\x12\x39\n\x04\x66orm\x18\x04 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Form\x12=\n\x06gender\x18\x05 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Gender\x12\x39\n\x04mood\x18\x06 \x01(\x0e\x32+.google.cloud.language.v1.PartOfSpeech.Mood\x12=\n\x06number\x18\x07 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Number\x12=\n\x06person\x18\x08 \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Person\x12=\n\x06proper\x18\t \x01(\x0e\x32-.google.cloud.language.v1.PartOfSpeech.Proper\x12G\n\x0breciprocity\x18\n \x01(\x0e\x32\x32.google.cloud.language.v1.PartOfSpeech.Reciprocity\x12;\n\x05tense\x18\x0b 
\x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Tense\x12;\n\x05voice\x18\x0c \x01(\x0e\x32,.google.cloud.language.v1.PartOfSpeech.Voice\"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r\"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03\"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e\"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b\"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03\"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06\"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03\"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04\"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02\"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02\"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06\"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03\"\xd8\x07\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12=\n\x05label\x18\x02 
\x01(\x0e\x32..google.cloud.language.v1.DependencyEdge.Label\"\xec\x06\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10\"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\"\xaf\x01\n\rEntityMention\x12\x30\n\x04text\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.TextSpan\x12:\n\x04type\x18\x02 \x01(\x0e\x32,.google.cloud.language.v1.EntityMention.Type\"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02\"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05\"\x8e\x01\n\x17\x41nalyzeSentimentRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"\xa4\x01\n\x18\x41nalyzeSentimentResponse\x12?\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12\x35\n\tsentences\x18\x03 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\"\x8d\x01\n\x16\x41nalyzeEntitiesRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"_\n\x17\x41nalyzeEntitiesResponse\x12\x32\n\x08\x65ntities\x18\x01 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x8b\x01\n\x14\x41nalyzeSyntaxRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12=\n\rencoding_type\x18\x02 
\x01(\x0e\x32&.google.cloud.language.v1.EncodingType\"\x91\x01\n\x15\x41nalyzeSyntaxResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x10\n\x08language\x18\x03 \x01(\t\"\xb6\x02\n\x13\x41nnotateTextRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\".google.cloud.language.v1.Document\x12H\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32\x36.google.cloud.language.v1.AnnotateTextRequest.Features\x12=\n\rencoding_type\x18\x03 \x01(\x0e\x32&.google.cloud.language.v1.EncodingType\x1a`\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12\"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\"\x85\x02\n\x14\x41nnotateTextResponse\x12\x35\n\tsentences\x18\x01 \x03(\x0b\x32\".google.cloud.language.v1.Sentence\x12/\n\x06tokens\x18\x02 \x03(\x0b\x32\x1f.google.cloud.language.v1.Token\x12\x32\n\x08\x65ntities\x18\x03 \x03(\x0b\x32 .google.cloud.language.v1.Entity\x12?\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32#.google.cloud.language.v1.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x8d\x05\n\x0fLanguageService\x12\xa4\x01\n\x10\x41nalyzeSentiment\x12\x31.google.cloud.language.v1.AnalyzeSentimentRequest\x1a\x32.google.cloud.language.v1.AnalyzeSentimentResponse\")\x82\xd3\xe4\x93\x02#\"\x1e/v1/documents:analyzeSentiment:\x01*\x12\xa0\x01\n\x0f\x41nalyzeEntities\x12\x30.google.cloud.language.v1.AnalyzeEntitiesRequest\x1a\x31.google.cloud.language.v1.AnalyzeEntitiesResponse\"(\x82\xd3\xe4\x93\x02\"\"\x1d/v1/documents:analyzeEntities:\x01*\x12\x98\x01\n\rAnalyzeSyntax\x12..google.cloud.language.v1.AnalyzeSyntaxRequest\x1a/.google.cloud.language.v1.AnalyzeSyntaxResponse\"&\x82\xd3\xe4\x93\x02 \"\x1b/v1/documents:analyzeSyntax:\x01*\x12\x94\x01\n\x0c\x41nnotateText\x12-.google.cloud.language.v1.AnnotateTextRequest\x1a..google.cloud.language.v1.AnnotateTextResponse\"%\x82\xd3\xe4\x93\x02\x1f\"\x1a/v1/documents:annotateText:\x01*Bx\n\x1c\x63om.google.cloud.language.v1B\x14LanguageServiceProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/language/v1;languageb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +_ENCODINGTYPE = _descriptor.EnumDescriptor( + name='EncodingType', + full_name='google.cloud.language.v1.EncodingType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF8', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF16', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF32', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=5797, + serialized_end=5853, +) +_sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) + +EncodingType = enum_type_wrapper.EnumTypeWrapper(_ENCODINGTYPE) +NONE = 0 +UTF8 = 1 +UTF16 = 2 +UTF32 = 3 + + +_DOCUMENT_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1.Document.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='PLAIN_TEXT', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HTML', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=245, + serialized_end=299, +) +_sym_db.RegisterEnumDescriptor(_DOCUMENT_TYPE) + +_ENTITY_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1.Entity.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERSON', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOCATION', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORGANIZATION', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EVENT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='WORK_OF_ART', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONSUMER_GOOD', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OTHER', index=7, number=7, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=699, + serialized_end=820, +) +_sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) + +_PARTOFSPEECH_TAG = _descriptor.EnumDescriptor( + name='Tag', + full_name='google.cloud.language.v1.PartOfSpeech.Tag', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADJ', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADP', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADV', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONJ', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DET', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOUN', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUM', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRON', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRT', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PUNCT', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VERB', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='X', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AFFIX', index=13, number=13, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1837, + serialized_end=1978, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) + +_PARTOFSPEECH_ASPECT = _descriptor.EnumDescriptor( + name='Aspect', + full_name='google.cloud.language.v1.PartOfSpeech.Aspect', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ASPECT_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERFECTIVE', index=1, number=1, + options=None, 
+ type=None), + _descriptor.EnumValueDescriptor( + name='IMPERFECTIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROGRESSIVE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1980, + serialized_end=2059, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) + +_PARTOFSPEECH_CASE = _descriptor.EnumDescriptor( + name='Case', + full_name='google.cloud.language.v1.PartOfSpeech.Case', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CASE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACCUSATIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVERBIAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLEMENTIVE', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DATIVE', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GENITIVE', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INSTRUMENTAL', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOCATIVE', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMINATIVE', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OBLIQUE', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARTITIVE', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREPOSITIONAL', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFLEXIVE_CASE', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RELATIVE_CASE', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VOCATIVE', index=14, number=14, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2062, + serialized_end=2310, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) + +_PARTOFSPEECH_FORM = _descriptor.EnumDescriptor( + name='Form', + full_name='google.cloud.language.v1.PartOfSpeech.Form', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FORM_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADNOMIAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXILIARY', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLEMENTIZER', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FINAL_ENDING', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GERUND', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REALIS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IRREALIS', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SHORT', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LONG', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORDER', index=10, 
number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPECIFIC', index=11, number=11, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2313, + serialized_end=2488, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) + +_PARTOFSPEECH_GENDER = _descriptor.EnumDescriptor( + name='Gender', + full_name='google.cloud.language.v1.PartOfSpeech.Gender', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='GENDER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FEMININE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MASCULINE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEUTER', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2490, + serialized_end=2559, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) + +_PARTOFSPEECH_MOOD = _descriptor.EnumDescriptor( + name='Mood', + full_name='google.cloud.language.v1.PartOfSpeech.Mood', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MOOD_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONDITIONAL_MOOD', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERATIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INDICATIVE', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INTERROGATIVE', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JUSSIVE', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUBJUNCTIVE', index=6, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2561, + serialized_end=2688, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) + +_PARTOFSPEECH_NUMBER = _descriptor.EnumDescriptor( + name='Number', + full_name='google.cloud.language.v1.PartOfSpeech.Number', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NUMBER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SINGULAR', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLURAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DUAL', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2690, + serialized_end=2754, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) + +_PARTOFSPEECH_PERSON = _descriptor.EnumDescriptor( + name='Person', + full_name='google.cloud.language.v1.PartOfSpeech.Person', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PERSON_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FIRST', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SECOND', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='THIRD', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFLEXIVE_PERSON', index=4, number=4, + options=None, + type=None), + ], + 
containing_type=None, + options=None, + serialized_start=2756, + serialized_end=2840, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) + +_PARTOFSPEECH_PROPER = _descriptor.EnumDescriptor( + name='Proper', + full_name='google.cloud.language.v1.PartOfSpeech.Proper', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PROPER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROPER', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOT_PROPER', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2842, + serialized_end=2898, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) + +_PARTOFSPEECH_RECIPROCITY = _descriptor.EnumDescriptor( + name='Reciprocity', + full_name='google.cloud.language.v1.PartOfSpeech.Reciprocity', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='RECIPROCITY_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RECIPROCAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NON_RECIPROCAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2900, + serialized_end=2974, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) + +_PARTOFSPEECH_TENSE = _descriptor.EnumDescriptor( + name='Tense', + full_name='google.cloud.language.v1.PartOfSpeech.Tense', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TENSE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONDITIONAL_TENSE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FUTURE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PAST', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRESENT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERFECT', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLUPERFECT', index=6, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2976, + serialized_end=3091, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) + +_PARTOFSPEECH_VOICE = _descriptor.EnumDescriptor( + name='Voice', + full_name='google.cloud.language.v1.PartOfSpeech.Voice', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VOICE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACTIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAUSATIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PASSIVE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3093, + serialized_end=3159, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) + +_DEPENDENCYEDGE_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.cloud.language.v1.DependencyEdge.Label', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='ABBREV', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACOMP', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVCL', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVMOD', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AMOD', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='APPOS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ATTR', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUX', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXPASS', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CC', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CCOMP', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONJ', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CSUBJ', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CSUBJPASS', index=14, number=14, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DEP', index=15, number=15, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DET', index=16, number=16, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISCOURSE', index=17, number=17, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DOBJ', index=18, number=18, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EXPL', index=19, number=19, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GOESWITH', index=20, number=20, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IOBJ', index=21, number=21, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MARK', index=22, number=22, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MWE', index=23, number=23, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MWV', index=24, number=24, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEG', index=25, number=25, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NN', index=26, number=26, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NPADVMOD', index=27, number=27, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NSUBJ', index=28, number=28, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NSUBJPASS', index=29, number=29, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUM', index=30, number=30, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUMBER', index=31, number=31, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='P', index=32, number=32, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARATAXIS', index=33, number=33, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARTMOD', index=34, number=34, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PCOMP', index=35, number=35, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='POBJ', index=36, number=36, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POSS', index=37, number=37, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POSTNEG', index=38, number=38, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRECOMP', index=39, number=39, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRECONJ', index=40, number=40, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREDET', index=41, number=41, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREF', index=42, number=42, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREP', index=43, number=43, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRONL', index=44, number=44, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRT', index=45, number=45, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PS', index=46, number=46, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='QUANTMOD', index=47, number=47, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RCMOD', index=48, number=48, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RCMODREL', index=49, number=49, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RDROP', index=50, number=50, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REF', index=51, number=51, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REMNANT', index=52, number=52, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REPARANDUM', index=53, number=53, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ROOT', index=54, number=54, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SNUM', index=55, number=55, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUFF', index=56, number=56, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TMOD', index=57, number=57, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TOPIC', index=58, number=58, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VMOD', index=59, number=59, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VOCATIVE', index=60, number=60, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='XCOMP', index=61, number=61, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUFFIX', index=62, number=62, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TITLE', index=63, number=63, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVPHMOD', index=64, number=64, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXCAUS', index=65, number=65, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXVV', index=66, number=66, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DTMOD', index=67, number=67, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN', index=68, number=68, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KW', index=69, number=69, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LIST', index=70, number=70, + 
options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMC', index=71, number=71, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMCSUBJ', index=72, number=72, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMCSUBJPASS', index=73, number=73, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUMC', index=74, number=74, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COP', index=75, number=75, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISLOCATED', index=76, number=76, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3270, + serialized_end=4146, +) +_sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) + +_ENTITYMENTION_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1.EntityMention.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROPER', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMMON', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=4276, + serialized_end=4324, +) +_sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) + + +_DOCUMENT = _descriptor.Descriptor( + name='Document', + full_name='google.cloud.language.v1.Document', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1.Document.type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content', full_name='google.cloud.language.v1.Document.content', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gcs_content_uri', full_name='google.cloud.language.v1.Document.gcs_content_uri', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.Document.language', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DOCUMENT_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='source', full_name='google.cloud.language.v1.Document.source', + index=0, containing_type=None, fields=[]), + ], + serialized_start=114, + serialized_end=309, +) + + +_SENTENCE = _descriptor.Descriptor( + name='Sentence', + full_name='google.cloud.language.v1.Sentence', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name='text', full_name='google.cloud.language.v1.Sentence.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sentiment', full_name='google.cloud.language.v1.Sentence.sentiment', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=311, + serialized_end=427, +) + + +_ENTITY_METADATAENTRY = _descriptor.Descriptor( + name='MetadataEntry', + full_name='google.cloud.language.v1.Entity.MetadataEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.language.v1.Entity.MetadataEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.cloud.language.v1.Entity.MetadataEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=650, + serialized_end=697, +) + +_ENTITY = _descriptor.Descriptor( + name='Entity', + full_name='google.cloud.language.v1.Entity', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.language.v1.Entity.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1.Entity.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metadata', full_name='google.cloud.language.v1.Entity.metadata', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='salience', full_name='google.cloud.language.v1.Entity.salience', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mentions', 
full_name='google.cloud.language.v1.Entity.mentions', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ENTITY_METADATAENTRY, ], + enum_types=[ + _ENTITY_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=430, + serialized_end=820, +) + + +_TOKEN = _descriptor.Descriptor( + name='Token', + full_name='google.cloud.language.v1.Token', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='text', full_name='google.cloud.language.v1.Token.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='part_of_speech', full_name='google.cloud.language.v1.Token.part_of_speech', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dependency_edge', full_name='google.cloud.language.v1.Token.dependency_edge', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='lemma', full_name='google.cloud.language.v1.Token.lemma', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=823, + serialized_end=1026, +) + + +_SENTIMENT = _descriptor.Descriptor( + name='Sentiment', + full_name='google.cloud.language.v1.Sentiment', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='magnitude', full_name='google.cloud.language.v1.Sentiment.magnitude', index=0, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='score', full_name='google.cloud.language.v1.Sentiment.score', index=1, + number=3, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1028, + serialized_end=1073, +) + + +_PARTOFSPEECH = _descriptor.Descriptor( + name='PartOfSpeech', + full_name='google.cloud.language.v1.PartOfSpeech', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tag', 
full_name='google.cloud.language.v1.PartOfSpeech.tag', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='aspect', full_name='google.cloud.language.v1.PartOfSpeech.aspect', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='case', full_name='google.cloud.language.v1.PartOfSpeech.case', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='form', full_name='google.cloud.language.v1.PartOfSpeech.form', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gender', full_name='google.cloud.language.v1.PartOfSpeech.gender', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mood', full_name='google.cloud.language.v1.PartOfSpeech.mood', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='number', full_name='google.cloud.language.v1.PartOfSpeech.number', index=6, + number=7, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='person', full_name='google.cloud.language.v1.PartOfSpeech.person', index=7, + number=8, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='proper', full_name='google.cloud.language.v1.PartOfSpeech.proper', index=8, + number=9, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='reciprocity', full_name='google.cloud.language.v1.PartOfSpeech.reciprocity', index=9, + number=10, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tense', full_name='google.cloud.language.v1.PartOfSpeech.tense', index=10, + number=11, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='voice', 
full_name='google.cloud.language.v1.PartOfSpeech.voice', index=11, + number=12, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PARTOFSPEECH_TAG, + _PARTOFSPEECH_ASPECT, + _PARTOFSPEECH_CASE, + _PARTOFSPEECH_FORM, + _PARTOFSPEECH_GENDER, + _PARTOFSPEECH_MOOD, + _PARTOFSPEECH_NUMBER, + _PARTOFSPEECH_PERSON, + _PARTOFSPEECH_PROPER, + _PARTOFSPEECH_RECIPROCITY, + _PARTOFSPEECH_TENSE, + _PARTOFSPEECH_VOICE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1076, + serialized_end=3159, +) + + +_DEPENDENCYEDGE = _descriptor.Descriptor( + name='DependencyEdge', + full_name='google.cloud.language.v1.DependencyEdge', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='head_token_index', full_name='google.cloud.language.v1.DependencyEdge.head_token_index', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='label', full_name='google.cloud.language.v1.DependencyEdge.label', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DEPENDENCYEDGE_LABEL, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3162, + serialized_end=4146, +) + + +_ENTITYMENTION = _descriptor.Descriptor( + name='EntityMention', + full_name='google.cloud.language.v1.EntityMention', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='text', full_name='google.cloud.language.v1.EntityMention.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1.EntityMention.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _ENTITYMENTION_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4149, + serialized_end=4324, +) + + +_TEXTSPAN = _descriptor.Descriptor( + name='TextSpan', + full_name='google.cloud.language.v1.TextSpan', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='content', full_name='google.cloud.language.v1.TextSpan.content', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='begin_offset', 
full_name='google.cloud.language.v1.TextSpan.begin_offset', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4326, + serialized_end=4375, +) + + +_ANALYZESENTIMENTREQUEST = _descriptor.Descriptor( + name='AnalyzeSentimentRequest', + full_name='google.cloud.language.v1.AnalyzeSentimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnalyzeSentimentRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnalyzeSentimentRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4378, + serialized_end=4520, +) + + +_ANALYZESENTIMENTRESPONSE = _descriptor.Descriptor( + name='AnalyzeSentimentResponse', + full_name='google.cloud.language.v1.AnalyzeSentimentResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document_sentiment', full_name='google.cloud.language.v1.AnalyzeSentimentResponse.document_sentiment', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.AnalyzeSentimentResponse.language', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sentences', full_name='google.cloud.language.v1.AnalyzeSentimentResponse.sentences', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4523, + serialized_end=4687, +) + + +_ANALYZEENTITIESREQUEST = _descriptor.Descriptor( + name='AnalyzeEntitiesRequest', + full_name='google.cloud.language.v1.AnalyzeEntitiesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnalyzeEntitiesRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnalyzeEntitiesRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4690, + serialized_end=4831, +) + + +_ANALYZEENTITIESRESPONSE = _descriptor.Descriptor( + name='AnalyzeEntitiesResponse', + full_name='google.cloud.language.v1.AnalyzeEntitiesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entities', full_name='google.cloud.language.v1.AnalyzeEntitiesResponse.entities', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.AnalyzeEntitiesResponse.language', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4833, + serialized_end=4928, +) + + +_ANALYZESYNTAXREQUEST = _descriptor.Descriptor( + name='AnalyzeSyntaxRequest', + full_name='google.cloud.language.v1.AnalyzeSyntaxRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnalyzeSyntaxRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnalyzeSyntaxRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4931, + serialized_end=5070, +) + + +_ANALYZESYNTAXRESPONSE = _descriptor.Descriptor( + name='AnalyzeSyntaxResponse', + full_name='google.cloud.language.v1.AnalyzeSyntaxResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sentences', full_name='google.cloud.language.v1.AnalyzeSyntaxResponse.sentences', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tokens', 
full_name='google.cloud.language.v1.AnalyzeSyntaxResponse.tokens', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.AnalyzeSyntaxResponse.language', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5073, + serialized_end=5218, +) + + +_ANNOTATETEXTREQUEST_FEATURES = _descriptor.Descriptor( + name='Features', + full_name='google.cloud.language.v1.AnnotateTextRequest.Features', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='extract_syntax', full_name='google.cloud.language.v1.AnnotateTextRequest.Features.extract_syntax', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extract_entities', full_name='google.cloud.language.v1.AnnotateTextRequest.Features.extract_entities', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extract_document_sentiment', full_name='google.cloud.language.v1.AnnotateTextRequest.Features.extract_document_sentiment', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5435, + serialized_end=5531, +) + +_ANNOTATETEXTREQUEST = _descriptor.Descriptor( + name='AnnotateTextRequest', + full_name='google.cloud.language.v1.AnnotateTextRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1.AnnotateTextRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='features', full_name='google.cloud.language.v1.AnnotateTextRequest.features', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1.AnnotateTextRequest.encoding_type', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ANNOTATETEXTREQUEST_FEATURES, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5221, + serialized_end=5531, +) + + +_ANNOTATETEXTRESPONSE = _descriptor.Descriptor( + name='AnnotateTextResponse', + full_name='google.cloud.language.v1.AnnotateTextResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sentences', full_name='google.cloud.language.v1.AnnotateTextResponse.sentences', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tokens', full_name='google.cloud.language.v1.AnnotateTextResponse.tokens', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entities', full_name='google.cloud.language.v1.AnnotateTextResponse.entities', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='document_sentiment', full_name='google.cloud.language.v1.AnnotateTextResponse.document_sentiment', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1.AnnotateTextResponse.language', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5534, + serialized_end=5795, +) + +_DOCUMENT.fields_by_name['type'].enum_type = _DOCUMENT_TYPE +_DOCUMENT_TYPE.containing_type = _DOCUMENT +_DOCUMENT.oneofs_by_name['source'].fields.append( + _DOCUMENT.fields_by_name['content']) +_DOCUMENT.fields_by_name['content'].containing_oneof = _DOCUMENT.oneofs_by_name['source'] +_DOCUMENT.oneofs_by_name['source'].fields.append( + _DOCUMENT.fields_by_name['gcs_content_uri']) +_DOCUMENT.fields_by_name['gcs_content_uri'].containing_oneof = _DOCUMENT.oneofs_by_name['source'] +_SENTENCE.fields_by_name['text'].message_type = _TEXTSPAN +_SENTENCE.fields_by_name['sentiment'].message_type = _SENTIMENT +_ENTITY_METADATAENTRY.containing_type = _ENTITY +_ENTITY.fields_by_name['type'].enum_type = _ENTITY_TYPE +_ENTITY.fields_by_name['metadata'].message_type = _ENTITY_METADATAENTRY +_ENTITY.fields_by_name['mentions'].message_type = _ENTITYMENTION +_ENTITY_TYPE.containing_type = _ENTITY +_TOKEN.fields_by_name['text'].message_type = _TEXTSPAN +_TOKEN.fields_by_name['part_of_speech'].message_type = _PARTOFSPEECH +_TOKEN.fields_by_name['dependency_edge'].message_type = _DEPENDENCYEDGE 
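+# The block of assignments around this point resolves cross-references that
+# the serialized descriptor above records only by name: message- and
+# enum-typed fields get their ``message_type``/``enum_type`` pointers, nested
+# messages and enums get ``containing_type`` back-references, and the
+# ``source`` oneof on ``Document`` is wired to its two member fields.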
+_PARTOFSPEECH.fields_by_name['tag'].enum_type = _PARTOFSPEECH_TAG +_PARTOFSPEECH.fields_by_name['aspect'].enum_type = _PARTOFSPEECH_ASPECT +_PARTOFSPEECH.fields_by_name['case'].enum_type = _PARTOFSPEECH_CASE +_PARTOFSPEECH.fields_by_name['form'].enum_type = _PARTOFSPEECH_FORM +_PARTOFSPEECH.fields_by_name['gender'].enum_type = _PARTOFSPEECH_GENDER +_PARTOFSPEECH.fields_by_name['mood'].enum_type = _PARTOFSPEECH_MOOD +_PARTOFSPEECH.fields_by_name['number'].enum_type = _PARTOFSPEECH_NUMBER +_PARTOFSPEECH.fields_by_name['person'].enum_type = _PARTOFSPEECH_PERSON +_PARTOFSPEECH.fields_by_name['proper'].enum_type = _PARTOFSPEECH_PROPER +_PARTOFSPEECH.fields_by_name['reciprocity'].enum_type = _PARTOFSPEECH_RECIPROCITY +_PARTOFSPEECH.fields_by_name['tense'].enum_type = _PARTOFSPEECH_TENSE +_PARTOFSPEECH.fields_by_name['voice'].enum_type = _PARTOFSPEECH_VOICE +_PARTOFSPEECH_TAG.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_ASPECT.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_CASE.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_FORM.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_GENDER.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_MOOD.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_NUMBER.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_PERSON.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_PROPER.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_RECIPROCITY.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_TENSE.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_VOICE.containing_type = _PARTOFSPEECH +_DEPENDENCYEDGE.fields_by_name['label'].enum_type = _DEPENDENCYEDGE_LABEL +_DEPENDENCYEDGE_LABEL.containing_type = _DEPENDENCYEDGE +_ENTITYMENTION.fields_by_name['text'].message_type = _TEXTSPAN +_ENTITYMENTION.fields_by_name['type'].enum_type = _ENTITYMENTION_TYPE +_ENTITYMENTION_TYPE.containing_type = _ENTITYMENTION +_ANALYZESENTIMENTREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANALYZESENTIMENTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANALYZESENTIMENTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT +_ANALYZESENTIMENTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE +_ANALYZEENTITIESREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANALYZEENTITIESREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANALYZEENTITIESRESPONSE.fields_by_name['entities'].message_type = _ENTITY +_ANALYZESYNTAXREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANALYZESYNTAXREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANALYZESYNTAXRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE +_ANALYZESYNTAXRESPONSE.fields_by_name['tokens'].message_type = _TOKEN +_ANNOTATETEXTREQUEST_FEATURES.containing_type = _ANNOTATETEXTREQUEST +_ANNOTATETEXTREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANNOTATETEXTREQUEST.fields_by_name['features'].message_type = _ANNOTATETEXTREQUEST_FEATURES +_ANNOTATETEXTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANNOTATETEXTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE +_ANNOTATETEXTRESPONSE.fields_by_name['tokens'].message_type = _TOKEN +_ANNOTATETEXTRESPONSE.fields_by_name['entities'].message_type = _ENTITY +_ANNOTATETEXTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT +DESCRIPTOR.message_types_by_name['Document'] = _DOCUMENT +DESCRIPTOR.message_types_by_name['Sentence'] = _SENTENCE +DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY +DESCRIPTOR.message_types_by_name['Token'] = _TOKEN 
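+# Hypothetical usage sketch (not protoc output; the class names are the
+# generated ones defined below): the _reflection.GeneratedProtocolMessageType
+# calls that follow turn these registered descriptors into concrete message
+# classes, used like any other protobuf message:
+#
+#     document = Document(type=Document.PLAIN_TEXT, content='Hello, world!')
+#     payload = document.SerializeToString()
+#     roundtrip = Document.FromString(payload)
+#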
+DESCRIPTOR.message_types_by_name['Sentiment'] = _SENTIMENT +DESCRIPTOR.message_types_by_name['PartOfSpeech'] = _PARTOFSPEECH +DESCRIPTOR.message_types_by_name['DependencyEdge'] = _DEPENDENCYEDGE +DESCRIPTOR.message_types_by_name['EntityMention'] = _ENTITYMENTION +DESCRIPTOR.message_types_by_name['TextSpan'] = _TEXTSPAN +DESCRIPTOR.message_types_by_name['AnalyzeSentimentRequest'] = _ANALYZESENTIMENTREQUEST +DESCRIPTOR.message_types_by_name['AnalyzeSentimentResponse'] = _ANALYZESENTIMENTRESPONSE +DESCRIPTOR.message_types_by_name['AnalyzeEntitiesRequest'] = _ANALYZEENTITIESREQUEST +DESCRIPTOR.message_types_by_name['AnalyzeEntitiesResponse'] = _ANALYZEENTITIESRESPONSE +DESCRIPTOR.message_types_by_name['AnalyzeSyntaxRequest'] = _ANALYZESYNTAXREQUEST +DESCRIPTOR.message_types_by_name['AnalyzeSyntaxResponse'] = _ANALYZESYNTAXRESPONSE +DESCRIPTOR.message_types_by_name['AnnotateTextRequest'] = _ANNOTATETEXTREQUEST +DESCRIPTOR.message_types_by_name['AnnotateTextResponse'] = _ANNOTATETEXTRESPONSE +DESCRIPTOR.enum_types_by_name['EncodingType'] = _ENCODINGTYPE + +Document = _reflection.GeneratedProtocolMessageType('Document', (_message.Message,), dict( + DESCRIPTOR = _DOCUMENT, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents the input to API methods. + + + Attributes: + type: + Required. If the type is not set or is ``TYPE_UNSPECIFIED``, + returns an ``INVALID_ARGUMENT`` error. + source: + The source of the document: a string containing the content or + a Google Cloud Storage URI. + content: + The content of the input in string format. + gcs_content_uri: + The Google Cloud Storage URI where the file content is + located. This URI must be of the form: + gs://bucket\_name/object\_name. For more details, see + https://cloud.google.com/storage/docs/reference-uris. NOTE: + Cloud Storage object versioning is not supported. + language: + The language of the document (if not specified, the language + is automatically detected). Both ISO and BCP-47 language codes + are accepted. `Language Support + <https://cloud.google.com/natural-language/docs/languages>`__ + lists currently supported languages for each API method. If + the language (either specified by the caller or automatically + detected) is not supported by the called API method, an + ``INVALID_ARGUMENT`` error is returned. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Document) + )) +_sym_db.RegisterMessage(Document) + +Sentence = _reflection.GeneratedProtocolMessageType('Sentence', (_message.Message,), dict( + DESCRIPTOR = _SENTENCE, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents a sentence in the input document. + + + Attributes: + text: + The sentence text. + sentiment: + For calls to [AnalyzeSentiment][] or if [AnnotateTextRequest.F + eatures.extract\_document\_sentiment][google.cloud.language.v1 + .AnnotateTextRequest.Features.extract\_document\_sentiment] is + set to true, this field will contain the sentiment for the + sentence. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Sentence) + )) +_sym_db.RegisterMessage(Sentence) + +Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict( + + MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict( + DESCRIPTOR = _ENTITY_METADATAENTRY, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Entity.MetadataEntry) + )) + , + DESCRIPTOR = _ENTITY, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents a phrase in the text that is a known entity, such as a + person, an organization, or location. The API associates information, + such as salience and mentions, with entities. + + + Attributes: + name: + The representative name for the entity. + type: + The entity type. + metadata: + Metadata associated with the entity. Currently, Wikipedia + URLs and Knowledge Graph MIDs are provided, if available. The + associated keys are "wikipedia\_url" and "mid", respectively. + salience: + The salience score associated with the entity in the [0, 1.0] + range. The salience score for an entity provides information + about the importance or centrality of that entity to the + entire document text. Scores closer to 0 are less salient, + while scores closer to 1.0 are highly salient. + mentions: + The mentions of this entity in the input document. The API + currently supports proper noun mentions. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Entity) + )) +_sym_db.RegisterMessage(Entity) +_sym_db.RegisterMessage(Entity.MetadataEntry) + +Token = _reflection.GeneratedProtocolMessageType('Token', (_message.Message,), dict( + DESCRIPTOR = _TOKEN, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents the smallest syntactic building block of the text. + + + Attributes: + text: + The token text. + part_of_speech: + Parts of speech tag for this token. + dependency_edge: + Dependency tree parse for this token. + lemma: + `Lemma + <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__ of + the token. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Token) + )) +_sym_db.RegisterMessage(Token) + +Sentiment = _reflection.GeneratedProtocolMessageType('Sentiment', (_message.Message,), dict( + DESCRIPTOR = _SENTIMENT, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents the feeling associated with the entire text or entities in + the text. + + + Attributes: + magnitude: + A non-negative number in the [0, +inf) range, which represents + the absolute magnitude of sentiment regardless of score + (positive or negative). + score: + Sentiment score between -1.0 (negative sentiment) and 1.0 + (positive sentiment). + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.Sentiment) + )) +_sym_db.RegisterMessage(Sentiment) + +PartOfSpeech = _reflection.GeneratedProtocolMessageType('PartOfSpeech', (_message.Message,), dict( + DESCRIPTOR = _PARTOFSPEECH, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents part of speech information for a token. Parts of speech are + as defined in + http://www.lrec-conf.org/proceedings/lrec2012/pdf/274\_Paper.pdf + + + Attributes: + tag: + The part of speech tag. + aspect: + The grammatical aspect. + case: + The grammatical case. + form: + The grammatical form. 
+ gender: + The grammatical gender. + mood: + The grammatical mood. + number: + The grammatical number. + person: + The grammatical person. + proper: + The grammatical properness. + reciprocity: + The grammatical reciprocity. + tense: + The grammatical tense. + voice: + The grammatical voice. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.PartOfSpeech) + )) +_sym_db.RegisterMessage(PartOfSpeech) + +DependencyEdge = _reflection.GeneratedProtocolMessageType('DependencyEdge', (_message.Message,), dict( + DESCRIPTOR = _DEPENDENCYEDGE, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents dependency parse tree information for a token. (For more + information on dependency labels, see + http://www.aclweb.org/anthology/P13-2017 + + + Attributes: + head_token_index: + Represents the head of this token in the dependency tree. This + is the index of the token which has an arc going to this + token. The index is the position of the token in the array of + tokens returned by the API method. If this token is a root + token, then the ``head_token_index`` is its own index. + label: + The parse label for the token. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.DependencyEdge) + )) +_sym_db.RegisterMessage(DependencyEdge) + +EntityMention = _reflection.GeneratedProtocolMessageType('EntityMention', (_message.Message,), dict( + DESCRIPTOR = _ENTITYMENTION, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents a mention for an entity in the text. Currently, proper noun + mentions are supported. + + + Attributes: + text: + The mention text. + type: + The type of the entity mention. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.EntityMention) + )) +_sym_db.RegisterMessage(EntityMention) + +TextSpan = _reflection.GeneratedProtocolMessageType('TextSpan', (_message.Message,), dict( + DESCRIPTOR = _TEXTSPAN, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """Represents an output piece of text. + + + Attributes: + content: + The content of the output text. + begin_offset: + The API calculates the beginning offset of the content in the + original document according to the + [EncodingType][google.cloud.language.v1.EncodingType] + specified in the API request. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.TextSpan) + )) +_sym_db.RegisterMessage(TextSpan) + +AnalyzeSentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESENTIMENTREQUEST, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """The sentiment analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate sentence + offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSentimentRequest) + )) +_sym_db.RegisterMessage(AnalyzeSentimentRequest) + +AnalyzeSentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESENTIMENTRESPONSE, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """The sentiment analysis response message. + + + Attributes: + document_sentiment: + The overall sentiment of the input document. 
+ language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1.Document.language] field for more details. + sentences: + The sentiment for all the sentences in the document. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSentimentResponse) + )) +_sym_db.RegisterMessage(AnalyzeSentimentResponse) + +AnalyzeEntitiesRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITIESREQUEST, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """The entity analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitiesRequest) + )) +_sym_db.RegisterMessage(AnalyzeEntitiesRequest) + +AnalyzeEntitiesResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITIESRESPONSE, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """The entity analysis response message. + + + Attributes: + entities: + The recognized entities in the input document. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1.Document.language] field for more details. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeEntitiesResponse) + )) +_sym_db.RegisterMessage(AnalyzeEntitiesResponse) + +AnalyzeSyntaxRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESYNTAXREQUEST, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """The syntax analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSyntaxRequest) + )) +_sym_db.RegisterMessage(AnalyzeSyntaxRequest) + +AnalyzeSyntaxResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESYNTAXRESPONSE, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """The syntax analysis response message. + + + Attributes: + sentences: + Sentences in the input document. + tokens: + Tokens, along with their syntactic information, in the input + document. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1.Document.language] field for more details. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnalyzeSyntaxResponse) + )) +_sym_db.RegisterMessage(AnalyzeSyntaxResponse) + +AnnotateTextRequest = _reflection.GeneratedProtocolMessageType('AnnotateTextRequest', (_message.Message,), dict( + + Features = _reflection.GeneratedProtocolMessageType('Features', (_message.Message,), dict( + DESCRIPTOR = _ANNOTATETEXTREQUEST_FEATURES, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """All available features for sentiment, syntax, and semantic analysis. + Setting each one to true will enable that specific analysis for the + input. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextRequest.Features) + )) + , + DESCRIPTOR = _ANNOTATETEXTREQUEST, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """The request message for the text annotation API, which can perform + multiple analysis types (sentiment, entities, and syntax) in one call. + + + Attributes: + extract_syntax: + Extract syntax information. + extract_entities: + Extract entities. + extract_document_sentiment: + Extract document-level sentiment. + document: + Input document. + features: + The enabled features. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextRequest) + )) +_sym_db.RegisterMessage(AnnotateTextRequest) +_sym_db.RegisterMessage(AnnotateTextRequest.Features) + +AnnotateTextResponse = _reflection.GeneratedProtocolMessageType('AnnotateTextResponse', (_message.Message,), dict( + DESCRIPTOR = _ANNOTATETEXTRESPONSE, + __module__ = 'google.cloud.proto.language.v1.language_service_pb2' + , + __doc__ = """The text annotations response message. + + + Attributes: + sentences: + Sentences in the input document. Populated if the user enables + [AnnotateTextRequest.Features.extract\_syntax][google.cloud.la + nguage.v1.AnnotateTextRequest.Features.extract\_syntax]. + tokens: + Tokens, along with their syntactic information, in the input + document. Populated if the user enables [AnnotateTextRequest.F + eatures.extract\_syntax][google.cloud.language.v1.AnnotateText + Request.Features.extract\_syntax]. + entities: + Entities, along with their semantic information, in the input + document. Populated if the user enables [AnnotateTextRequest.F + eatures.extract\_entities][google.cloud.language.v1.AnnotateTe + xtRequest.Features.extract\_entities]. + document_sentiment: + The overall sentiment for the document. Populated if the user + enables [AnnotateTextRequest.Features.extract\_document\_senti + ment][google.cloud.language.v1.AnnotateTextRequest.Features.ex + tract\_document\_sentiment]. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1.Document.language] field for more details. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1.AnnotateTextResponse) + )) +_sym_db.RegisterMessage(AnnotateTextResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.cloud.language.v1B\024LanguageServiceProtoP\001Z@google.golang.org/genproto/googleapis/cloud/language/v1;language')) +_ENTITY_METADATAENTRY.has_options = True +_ENTITY_METADATAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', + request_serializer=AnalyzeSentimentRequest.SerializeToString, + response_deserializer=AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntities', + request_serializer=AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=AnnotateTextRequest.SerializeToString, + response_deserializer=AnnotateTextResponse.FromString, + ) + + + class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=AnalyzeSentimentRequest.FromString, + response_serializer=AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=AnalyzeEntitiesRequest.FromString, + response_serializer=AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=AnalyzeSyntaxRequest.FromString, + response_serializer=AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=AnnotateTextRequest.FromString, + response_serializer=AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaLanguageServiceServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnnotateText(self, request, context): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaLanguageServiceStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + def AnalyzeSentiment(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Analyzes the sentiment of the provided text. + """ + raise NotImplementedError() + AnalyzeSentiment.future = None + def AnalyzeEntities(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. 
+ """ + raise NotImplementedError() + AnalyzeEntities.future = None + def AnalyzeSyntax(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + raise NotImplementedError() + AnalyzeSyntax.future = None + def AnnotateText(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. + """ + raise NotImplementedError() + AnnotateText.future = None + + + def beta_create_LanguageService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.FromString, + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextRequest.FromString, + } + response_serializers = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextResponse.SerializeToString, + } + method_implementations = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): face_utilities.unary_unary_inline(servicer.AnalyzeEntities), + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeSentiment), + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): face_utilities.unary_unary_inline(servicer.AnalyzeSyntax), + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): face_utilities.unary_unary_inline(servicer.AnnotateText), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_LanguageService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.SerializeToString, + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextRequest.SerializeToString, + } + response_deserializers = { + ('google.cloud.language.v1.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.FromString, + ('google.cloud.language.v1.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.FromString, + ('google.cloud.language.v1.LanguageService', 'AnnotateText'): AnnotateTextResponse.FromString, + } + cardinalities = { + 'AnalyzeEntities': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeSentiment': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeSyntax': cardinality.Cardinality.UNARY_UNARY, + 'AnnotateText': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.cloud.language.v1.LanguageService', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py b/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py new file mode 100644 index 000000000000..19ab43fae3f0 --- /dev/null +++ b/language/google/cloud/proto/language/v1/language_service_pb2_grpc.py @@ -0,0 +1,104 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.language.v1.language_service_pb2 as google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2 + + +class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSentiment', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeEntities', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnalyzeSyntax', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1.LanguageService/AnnotateText', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextResponse.FromString, + ) + + +class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all the features that analyzeSentiment, + analyzeEntities, and analyzeSyntax provide in one call. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/language/google/cloud/proto/language/v1beta2/__init__.py b/language/google/cloud/proto/language/v1beta2/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/language/google/cloud/proto/language/v1beta2/__init__.py @@ -0,0 +1 @@ + diff --git a/language/google/cloud/proto/language/v1beta2/language_service_pb2.py b/language/google/cloud/proto/language/v1beta2/language_service_pb2.py new file mode 100644 index 000000000000..d3e1d150af8d --- /dev/null +++ b/language/google/cloud/proto/language/v1beta2/language_service_pb2.py @@ -0,0 +1,2843 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/proto/language/v1beta2/language_service.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/language/v1beta2/language_service.proto', + package='google.cloud.language.v1beta2', + syntax='proto3', + serialized_pb=_b('\n:google/cloud/proto/language/v1beta2/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t\"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source\"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\"\xd2\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 \x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t\"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02\"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 
\x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice\"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r\"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03\"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e\"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b\"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03\"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06\"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03\"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04\"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02\"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02\"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06\"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03\"\xdd\x07\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 
\x01(\x05\x12\x42\n\x05label\x18\x02 \x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label\"\xec\x06\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 \x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10\"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02\"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05\"\x98\x01\n\x17\x41nalyzeSentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\"\x9e\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 
\x01(\t\"\x97\x01\n\x16\x41nalyzeEntitiesRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t\"\x95\x01\n\x14\x41nalyzeSyntaxRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t\"\xe8\x02\n\x13\x41nnotateTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12M\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.Features\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x82\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12\"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\"\x99\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x97\x07\n\x0fLanguageService\x12\xb3\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse\".\x82\xd3\xe4\x93\x02(\"#/v1beta2/documents:analyzeSentiment:\x01*\x12\xaf\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse\"-\x82\xd3\xe4\x93\x02\'\"\"/v1beta2/documents:analyzeEntities:\x01*\x12\xcb\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse\"4\x82\xd3\xe4\x93\x02.\")/v1beta2/documents:analyzeEntitySentiment:\x01*\x12\xa7\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSyntaxRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse\"+\x82\xd3\xe4\x93\x02%\" /v1beta2/documents:analyzeSyntax:\x01*\x12\xa3\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse\"*\x82\xd3\xe4\x93\x02$\"\x1f/v1beta2/documents:annotateText:\x01*B\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + 
+_ENCODINGTYPE = _descriptor.EnumDescriptor( + name='EncodingType', + full_name='google.cloud.language.v1beta2.EncodingType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NONE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF8', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF16', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='UTF32', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=6539, + serialized_end=6595, +) +_sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) + +EncodingType = enum_type_wrapper.EnumTypeWrapper(_ENCODINGTYPE) +NONE = 0 +UTF8 = 1 +UTF16 = 2 +UTF32 = 3 + + +_DOCUMENT_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1beta2.Document.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLAIN_TEXT', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HTML', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=355, + serialized_end=409, +) +_sym_db.RegisterEnumDescriptor(_DOCUMENT_TYPE) + +_ENTITY_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1beta2.Entity.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERSON', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOCATION', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORGANIZATION', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EVENT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='WORK_OF_ART', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONSUMER_GOOD', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OTHER', index=7, number=7, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=895, + serialized_end=1016, +) +_sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) + +_PARTOFSPEECH_TAG = _descriptor.EnumDescriptor( + name='Tag', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Tag', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADJ', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADP', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADV', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONJ', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DET', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOUN', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUM', index=7, number=7, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='PRON', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRT', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PUNCT', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VERB', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='X', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AFFIX', index=13, number=13, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2108, + serialized_end=2249, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) + +_PARTOFSPEECH_ASPECT = _descriptor.EnumDescriptor( + name='Aspect', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Aspect', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ASPECT_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PERFECTIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERFECTIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROGRESSIVE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2251, + serialized_end=2330, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) + +_PARTOFSPEECH_CASE = _descriptor.EnumDescriptor( + name='Case', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Case', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='CASE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACCUSATIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVERBIAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLEMENTIVE', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DATIVE', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GENITIVE', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INSTRUMENTAL', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LOCATIVE', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMINATIVE', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='OBLIQUE', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARTITIVE', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREPOSITIONAL', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFLEXIVE_CASE', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RELATIVE_CASE', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VOCATIVE', index=14, number=14, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2333, + serialized_end=2581, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) + +_PARTOFSPEECH_FORM = _descriptor.EnumDescriptor( + name='Form', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Form', + 
filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FORM_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADNOMIAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXILIARY', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLEMENTIZER', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FINAL_ENDING', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GERUND', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REALIS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IRREALIS', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SHORT', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LONG', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ORDER', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SPECIFIC', index=11, number=11, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2584, + serialized_end=2759, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) + +_PARTOFSPEECH_GENDER = _descriptor.EnumDescriptor( + name='Gender', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Gender', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='GENDER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FEMININE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MASCULINE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEUTER', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2761, + serialized_end=2830, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) + +_PARTOFSPEECH_MOOD = _descriptor.EnumDescriptor( + name='Mood', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Mood', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MOOD_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONDITIONAL_MOOD', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERATIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INDICATIVE', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='INTERROGATIVE', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='JUSSIVE', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUBJUNCTIVE', index=6, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2832, + serialized_end=2959, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) + +_PARTOFSPEECH_NUMBER = _descriptor.EnumDescriptor( + name='Number', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Number', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='NUMBER_UNKNOWN', index=0, number=0, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='SINGULAR', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLURAL', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DUAL', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2961, + serialized_end=3025, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) + +_PARTOFSPEECH_PERSON = _descriptor.EnumDescriptor( + name='Person', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Person', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PERSON_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FIRST', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SECOND', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='THIRD', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REFLEXIVE_PERSON', index=4, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3027, + serialized_end=3111, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) + +_PARTOFSPEECH_PROPER = _descriptor.EnumDescriptor( + name='Proper', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Proper', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PROPER_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROPER', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOT_PROPER', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3113, + serialized_end=3169, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) + +_PARTOFSPEECH_RECIPROCITY = _descriptor.EnumDescriptor( + name='Reciprocity', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Reciprocity', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='RECIPROCITY_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RECIPROCAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NON_RECIPROCAL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3171, + serialized_end=3245, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) + +_PARTOFSPEECH_TENSE = _descriptor.EnumDescriptor( + name='Tense', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Tense', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TENSE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONDITIONAL_TENSE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FUTURE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PAST', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRESENT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IMPERFECT', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PLUPERFECT', index=6, number=6, + options=None, + type=None), + ], + 
containing_type=None, + options=None, + serialized_start=3247, + serialized_end=3362, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) + +_PARTOFSPEECH_VOICE = _descriptor.EnumDescriptor( + name='Voice', + full_name='google.cloud.language.v1beta2.PartOfSpeech.Voice', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VOICE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACTIVE', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CAUSATIVE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PASSIVE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3364, + serialized_end=3430, +) +_sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) + +_DEPENDENCYEDGE_LABEL = _descriptor.EnumDescriptor( + name='Label', + full_name='google.cloud.language.v1beta2.DependencyEdge.Label', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ABBREV', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ACOMP', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVCL', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVMOD', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AMOD', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='APPOS', index=6, number=6, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ATTR', index=7, number=7, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUX', index=8, number=8, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXPASS', index=9, number=9, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CC', index=10, number=10, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CCOMP', index=11, number=11, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CONJ', index=12, number=12, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CSUBJ', index=13, number=13, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CSUBJPASS', index=14, number=14, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DEP', index=15, number=15, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DET', index=16, number=16, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISCOURSE', index=17, number=17, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DOBJ', index=18, number=18, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='EXPL', index=19, number=19, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GOESWITH', index=20, number=20, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='IOBJ', index=21, number=21, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MARK', index=22, number=22, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MWE', index=23, number=23, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MWV', 
index=24, number=24, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NEG', index=25, number=25, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NN', index=26, number=26, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NPADVMOD', index=27, number=27, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NSUBJ', index=28, number=28, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NSUBJPASS', index=29, number=29, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUM', index=30, number=30, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUMBER', index=31, number=31, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='P', index=32, number=32, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARATAXIS', index=33, number=33, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PARTMOD', index=34, number=34, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PCOMP', index=35, number=35, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POBJ', index=36, number=36, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POSS', index=37, number=37, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POSTNEG', index=38, number=38, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRECOMP', index=39, number=39, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRECONJ', index=40, number=40, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREDET', index=41, number=41, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREF', index=42, number=42, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PREP', index=43, number=43, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRONL', index=44, number=44, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PRT', index=45, number=45, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PS', index=46, number=46, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='QUANTMOD', index=47, number=47, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RCMOD', index=48, number=48, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RCMODREL', index=49, number=49, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RDROP', index=50, number=50, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REF', index=51, number=51, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REMNANT', index=52, number=52, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REPARANDUM', index=53, number=53, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ROOT', index=54, number=54, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SNUM', index=55, number=55, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUFF', index=56, number=56, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TMOD', index=57, number=57, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TOPIC', index=58, number=58, + options=None, + type=None), + 
_descriptor.EnumValueDescriptor( + name='VMOD', index=59, number=59, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VOCATIVE', index=60, number=60, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='XCOMP', index=61, number=61, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SUFFIX', index=62, number=62, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TITLE', index=63, number=63, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADVPHMOD', index=64, number=64, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXCAUS', index=65, number=65, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='AUXVV', index=66, number=66, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DTMOD', index=67, number=67, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FOREIGN', index=68, number=68, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='KW', index=69, number=69, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='LIST', index=70, number=70, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMC', index=71, number=71, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMCSUBJ', index=72, number=72, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NOMCSUBJPASS', index=73, number=73, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='NUMC', index=74, number=74, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COP', index=75, number=75, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISLOCATED', index=76, number=76, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=3546, + serialized_end=4422, +) +_sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) + +_ENTITYMENTION_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='google.cloud.language.v1beta2.EntityMention.Type', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TYPE_UNKNOWN', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PROPER', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMMON', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=4623, + serialized_end=4671, +) +_sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) + + +_DOCUMENT = _descriptor.Descriptor( + name='Document', + full_name='google.cloud.language.v1beta2.Document', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1beta2.Document.type', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content', full_name='google.cloud.language.v1beta2.Document.content', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gcs_content_uri', 
full_name='google.cloud.language.v1beta2.Document.gcs_content_uri', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1beta2.Document.language', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DOCUMENT_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='source', full_name='google.cloud.language.v1beta2.Document.source', + index=0, containing_type=None, fields=[]), + ], + serialized_start=219, + serialized_end=419, +) + + +_SENTENCE = _descriptor.Descriptor( + name='Sentence', + full_name='google.cloud.language.v1beta2.Sentence', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='text', full_name='google.cloud.language.v1beta2.Sentence.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sentiment', full_name='google.cloud.language.v1beta2.Sentence.sentiment', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=421, + serialized_end=547, +) + + +_ENTITY_METADATAENTRY = _descriptor.Descriptor( + name='MetadataEntry', + full_name='google.cloud.language.v1beta2.Entity.MetadataEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.language.v1beta2.Entity.MetadataEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.cloud.language.v1beta2.Entity.MetadataEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=846, + serialized_end=893, +) + +_ENTITY = _descriptor.Descriptor( + name='Entity', + full_name='google.cloud.language.v1beta2.Entity', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.language.v1beta2.Entity.name', 
index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1beta2.Entity.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='metadata', full_name='google.cloud.language.v1beta2.Entity.metadata', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='salience', full_name='google.cloud.language.v1beta2.Entity.salience', index=3, + number=4, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mentions', full_name='google.cloud.language.v1beta2.Entity.mentions', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sentiment', full_name='google.cloud.language.v1beta2.Entity.sentiment', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ENTITY_METADATAENTRY, ], + enum_types=[ + _ENTITY_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=550, + serialized_end=1016, +) + + +_TOKEN = _descriptor.Descriptor( + name='Token', + full_name='google.cloud.language.v1beta2.Token', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='text', full_name='google.cloud.language.v1beta2.Token.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='part_of_speech', full_name='google.cloud.language.v1beta2.Token.part_of_speech', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dependency_edge', full_name='google.cloud.language.v1beta2.Token.dependency_edge', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='lemma', full_name='google.cloud.language.v1beta2.Token.lemma', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1019, + serialized_end=1237, +) + + +_SENTIMENT = _descriptor.Descriptor( + name='Sentiment', + full_name='google.cloud.language.v1beta2.Sentiment', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='magnitude', full_name='google.cloud.language.v1beta2.Sentiment.magnitude', index=0, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='score', full_name='google.cloud.language.v1beta2.Sentiment.score', index=1, + number=3, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1239, + serialized_end=1284, +) + + +_PARTOFSPEECH = _descriptor.Descriptor( + name='PartOfSpeech', + full_name='google.cloud.language.v1beta2.PartOfSpeech', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tag', full_name='google.cloud.language.v1beta2.PartOfSpeech.tag', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='aspect', full_name='google.cloud.language.v1beta2.PartOfSpeech.aspect', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='case', full_name='google.cloud.language.v1beta2.PartOfSpeech.case', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='form', full_name='google.cloud.language.v1beta2.PartOfSpeech.form', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='gender', full_name='google.cloud.language.v1beta2.PartOfSpeech.gender', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='mood', full_name='google.cloud.language.v1beta2.PartOfSpeech.mood', index=5, + number=6, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='number', 
full_name='google.cloud.language.v1beta2.PartOfSpeech.number', index=6, + number=7, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='person', full_name='google.cloud.language.v1beta2.PartOfSpeech.person', index=7, + number=8, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='proper', full_name='google.cloud.language.v1beta2.PartOfSpeech.proper', index=8, + number=9, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='reciprocity', full_name='google.cloud.language.v1beta2.PartOfSpeech.reciprocity', index=9, + number=10, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tense', full_name='google.cloud.language.v1beta2.PartOfSpeech.tense', index=10, + number=11, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='voice', full_name='google.cloud.language.v1beta2.PartOfSpeech.voice', index=11, + number=12, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _PARTOFSPEECH_TAG, + _PARTOFSPEECH_ASPECT, + _PARTOFSPEECH_CASE, + _PARTOFSPEECH_FORM, + _PARTOFSPEECH_GENDER, + _PARTOFSPEECH_MOOD, + _PARTOFSPEECH_NUMBER, + _PARTOFSPEECH_PERSON, + _PARTOFSPEECH_PROPER, + _PARTOFSPEECH_RECIPROCITY, + _PARTOFSPEECH_TENSE, + _PARTOFSPEECH_VOICE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1287, + serialized_end=3430, +) + + +_DEPENDENCYEDGE = _descriptor.Descriptor( + name='DependencyEdge', + full_name='google.cloud.language.v1beta2.DependencyEdge', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='head_token_index', full_name='google.cloud.language.v1beta2.DependencyEdge.head_token_index', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='label', full_name='google.cloud.language.v1beta2.DependencyEdge.label', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DEPENDENCYEDGE_LABEL, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3433, + serialized_end=4422, +) + + +_ENTITYMENTION = 
_descriptor.Descriptor( + name='EntityMention', + full_name='google.cloud.language.v1beta2.EntityMention', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='text', full_name='google.cloud.language.v1beta2.EntityMention.text', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='google.cloud.language.v1beta2.EntityMention.type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sentiment', full_name='google.cloud.language.v1beta2.EntityMention.sentiment', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _ENTITYMENTION_TYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4425, + serialized_end=4671, +) + + +_TEXTSPAN = _descriptor.Descriptor( + name='TextSpan', + full_name='google.cloud.language.v1beta2.TextSpan', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='content', full_name='google.cloud.language.v1beta2.TextSpan.content', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='begin_offset', full_name='google.cloud.language.v1beta2.TextSpan.begin_offset', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4673, + serialized_end=4722, +) + + +_ANALYZESENTIMENTREQUEST = _descriptor.Descriptor( + name='AnalyzeSentimentRequest', + full_name='google.cloud.language.v1beta2.AnalyzeSentimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1beta2.AnalyzeSentimentRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1beta2.AnalyzeSentimentRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + 
oneofs=[ + ], + serialized_start=4725, + serialized_end=4877, +) + + +_ANALYZESENTIMENTRESPONSE = _descriptor.Descriptor( + name='AnalyzeSentimentResponse', + full_name='google.cloud.language.v1beta2.AnalyzeSentimentResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document_sentiment', full_name='google.cloud.language.v1beta2.AnalyzeSentimentResponse.document_sentiment', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1beta2.AnalyzeSentimentResponse.language', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sentences', full_name='google.cloud.language.v1beta2.AnalyzeSentimentResponse.sentences', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=4880, + serialized_end=5054, +) + + +_ANALYZEENTITYSENTIMENTREQUEST = _descriptor.Descriptor( + name='AnalyzeEntitySentimentRequest', + full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5057, + serialized_end=5215, +) + + +_ANALYZEENTITYSENTIMENTRESPONSE = _descriptor.Descriptor( + name='AnalyzeEntitySentimentResponse', + full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entities', full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse.entities', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse.language', index=1, + number=2, type=9, cpp_type=9, label=1, + 
has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5217, + serialized_end=5324, +) + + +_ANALYZEENTITIESREQUEST = _descriptor.Descriptor( + name='AnalyzeEntitiesRequest', + full_name='google.cloud.language.v1beta2.AnalyzeEntitiesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1beta2.AnalyzeEntitiesRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1beta2.AnalyzeEntitiesRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5327, + serialized_end=5478, +) + + +_ANALYZEENTITIESRESPONSE = _descriptor.Descriptor( + name='AnalyzeEntitiesResponse', + full_name='google.cloud.language.v1beta2.AnalyzeEntitiesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entities', full_name='google.cloud.language.v1beta2.AnalyzeEntitiesResponse.entities', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1beta2.AnalyzeEntitiesResponse.language', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5480, + serialized_end=5580, +) + + +_ANALYZESYNTAXREQUEST = _descriptor.Descriptor( + name='AnalyzeSyntaxRequest', + full_name='google.cloud.language.v1beta2.AnalyzeSyntaxRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxRequest.encoding_type', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5583, + serialized_end=5732, +) + + +_ANALYZESYNTAXRESPONSE = _descriptor.Descriptor( + name='AnalyzeSyntaxResponse', + full_name='google.cloud.language.v1beta2.AnalyzeSyntaxResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sentences', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxResponse.sentences', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tokens', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxResponse.tokens', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1beta2.AnalyzeSyntaxResponse.language', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5735, + serialized_end=5890, +) + + +_ANNOTATETEXTREQUEST_FEATURES = _descriptor.Descriptor( + name='Features', + full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='extract_syntax', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_syntax', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extract_entities', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entities', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extract_document_sentiment', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='extract_entity_sentiment', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + 
syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6123, + serialized_end=6253, +) + +_ANNOTATETEXTREQUEST = _descriptor.Descriptor( + name='AnnotateTextRequest', + full_name='google.cloud.language.v1beta2.AnnotateTextRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='document', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.document', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='features', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.features', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='encoding_type', full_name='google.cloud.language.v1beta2.AnnotateTextRequest.encoding_type', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_ANNOTATETEXTREQUEST_FEATURES, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=5893, + serialized_end=6253, +) + + +_ANNOTATETEXTRESPONSE = _descriptor.Descriptor( + name='AnnotateTextResponse', + full_name='google.cloud.language.v1beta2.AnnotateTextResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='sentences', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.sentences', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tokens', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.tokens', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entities', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.entities', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='document_sentiment', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.document_sentiment', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='language', full_name='google.cloud.language.v1beta2.AnnotateTextResponse.language', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), 
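+    # Editor's sketch (comments only, not protoc output): once this module is
+    # compiled, the response fields declared above read as ordinary message
+    # attributes. A hypothetical traversal, assuming `resp` is a populated
+    # AnnotateTextResponse:
+    #
+    #   for token in resp.tokens:
+    #       print(token.lemma, token.dependency_edge.head_token_index)
+    #   print(resp.document_sentiment.score, resp.language)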
+ ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=6256, + serialized_end=6537, +) + +_DOCUMENT.fields_by_name['type'].enum_type = _DOCUMENT_TYPE +_DOCUMENT_TYPE.containing_type = _DOCUMENT +_DOCUMENT.oneofs_by_name['source'].fields.append( + _DOCUMENT.fields_by_name['content']) +_DOCUMENT.fields_by_name['content'].containing_oneof = _DOCUMENT.oneofs_by_name['source'] +_DOCUMENT.oneofs_by_name['source'].fields.append( + _DOCUMENT.fields_by_name['gcs_content_uri']) +_DOCUMENT.fields_by_name['gcs_content_uri'].containing_oneof = _DOCUMENT.oneofs_by_name['source'] +_SENTENCE.fields_by_name['text'].message_type = _TEXTSPAN +_SENTENCE.fields_by_name['sentiment'].message_type = _SENTIMENT +_ENTITY_METADATAENTRY.containing_type = _ENTITY +_ENTITY.fields_by_name['type'].enum_type = _ENTITY_TYPE +_ENTITY.fields_by_name['metadata'].message_type = _ENTITY_METADATAENTRY +_ENTITY.fields_by_name['mentions'].message_type = _ENTITYMENTION +_ENTITY.fields_by_name['sentiment'].message_type = _SENTIMENT +_ENTITY_TYPE.containing_type = _ENTITY +_TOKEN.fields_by_name['text'].message_type = _TEXTSPAN +_TOKEN.fields_by_name['part_of_speech'].message_type = _PARTOFSPEECH +_TOKEN.fields_by_name['dependency_edge'].message_type = _DEPENDENCYEDGE +_PARTOFSPEECH.fields_by_name['tag'].enum_type = _PARTOFSPEECH_TAG +_PARTOFSPEECH.fields_by_name['aspect'].enum_type = _PARTOFSPEECH_ASPECT +_PARTOFSPEECH.fields_by_name['case'].enum_type = _PARTOFSPEECH_CASE +_PARTOFSPEECH.fields_by_name['form'].enum_type = _PARTOFSPEECH_FORM +_PARTOFSPEECH.fields_by_name['gender'].enum_type = _PARTOFSPEECH_GENDER +_PARTOFSPEECH.fields_by_name['mood'].enum_type = _PARTOFSPEECH_MOOD +_PARTOFSPEECH.fields_by_name['number'].enum_type = _PARTOFSPEECH_NUMBER +_PARTOFSPEECH.fields_by_name['person'].enum_type = _PARTOFSPEECH_PERSON +_PARTOFSPEECH.fields_by_name['proper'].enum_type = _PARTOFSPEECH_PROPER +_PARTOFSPEECH.fields_by_name['reciprocity'].enum_type = _PARTOFSPEECH_RECIPROCITY +_PARTOFSPEECH.fields_by_name['tense'].enum_type = _PARTOFSPEECH_TENSE +_PARTOFSPEECH.fields_by_name['voice'].enum_type = _PARTOFSPEECH_VOICE +_PARTOFSPEECH_TAG.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_ASPECT.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_CASE.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_FORM.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_GENDER.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_MOOD.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_NUMBER.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_PERSON.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_PROPER.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_RECIPROCITY.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_TENSE.containing_type = _PARTOFSPEECH +_PARTOFSPEECH_VOICE.containing_type = _PARTOFSPEECH +_DEPENDENCYEDGE.fields_by_name['label'].enum_type = _DEPENDENCYEDGE_LABEL +_DEPENDENCYEDGE_LABEL.containing_type = _DEPENDENCYEDGE +_ENTITYMENTION.fields_by_name['text'].message_type = _TEXTSPAN +_ENTITYMENTION.fields_by_name['type'].enum_type = _ENTITYMENTION_TYPE +_ENTITYMENTION.fields_by_name['sentiment'].message_type = _SENTIMENT +_ENTITYMENTION_TYPE.containing_type = _ENTITYMENTION +_ANALYZESENTIMENTREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANALYZESENTIMENTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANALYZESENTIMENTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT 
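+# Editor's sketch (comments only, not protoc output): with the descriptors
+# cross-linked here, the generated classes registered below behave like
+# ordinary protobuf messages, and the module-level EncodingType wrapper
+# resolves enum names and numbers. A hypothetical round trip:
+#
+#   doc = Document(type=Document.PLAIN_TEXT, content='Hello world.')
+#   req = AnalyzeSentimentRequest(document=doc, encoding_type=UTF8)
+#   assert EncodingType.Name(req.encoding_type) == 'UTF8'
+#   parsed = AnalyzeSentimentRequest.FromString(req.SerializeToString())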
+_ANALYZESENTIMENTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE +_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANALYZEENTITYSENTIMENTRESPONSE.fields_by_name['entities'].message_type = _ENTITY +_ANALYZEENTITIESREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANALYZEENTITIESREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANALYZEENTITIESRESPONSE.fields_by_name['entities'].message_type = _ENTITY +_ANALYZESYNTAXREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANALYZESYNTAXREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANALYZESYNTAXRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE +_ANALYZESYNTAXRESPONSE.fields_by_name['tokens'].message_type = _TOKEN +_ANNOTATETEXTREQUEST_FEATURES.containing_type = _ANNOTATETEXTREQUEST +_ANNOTATETEXTREQUEST.fields_by_name['document'].message_type = _DOCUMENT +_ANNOTATETEXTREQUEST.fields_by_name['features'].message_type = _ANNOTATETEXTREQUEST_FEATURES +_ANNOTATETEXTREQUEST.fields_by_name['encoding_type'].enum_type = _ENCODINGTYPE +_ANNOTATETEXTRESPONSE.fields_by_name['sentences'].message_type = _SENTENCE +_ANNOTATETEXTRESPONSE.fields_by_name['tokens'].message_type = _TOKEN +_ANNOTATETEXTRESPONSE.fields_by_name['entities'].message_type = _ENTITY +_ANNOTATETEXTRESPONSE.fields_by_name['document_sentiment'].message_type = _SENTIMENT +DESCRIPTOR.message_types_by_name['Document'] = _DOCUMENT +DESCRIPTOR.message_types_by_name['Sentence'] = _SENTENCE +DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY +DESCRIPTOR.message_types_by_name['Token'] = _TOKEN +DESCRIPTOR.message_types_by_name['Sentiment'] = _SENTIMENT +DESCRIPTOR.message_types_by_name['PartOfSpeech'] = _PARTOFSPEECH +DESCRIPTOR.message_types_by_name['DependencyEdge'] = _DEPENDENCYEDGE +DESCRIPTOR.message_types_by_name['EntityMention'] = _ENTITYMENTION +DESCRIPTOR.message_types_by_name['TextSpan'] = _TEXTSPAN +DESCRIPTOR.message_types_by_name['AnalyzeSentimentRequest'] = _ANALYZESENTIMENTREQUEST +DESCRIPTOR.message_types_by_name['AnalyzeSentimentResponse'] = _ANALYZESENTIMENTRESPONSE +DESCRIPTOR.message_types_by_name['AnalyzeEntitySentimentRequest'] = _ANALYZEENTITYSENTIMENTREQUEST +DESCRIPTOR.message_types_by_name['AnalyzeEntitySentimentResponse'] = _ANALYZEENTITYSENTIMENTRESPONSE +DESCRIPTOR.message_types_by_name['AnalyzeEntitiesRequest'] = _ANALYZEENTITIESREQUEST +DESCRIPTOR.message_types_by_name['AnalyzeEntitiesResponse'] = _ANALYZEENTITIESRESPONSE +DESCRIPTOR.message_types_by_name['AnalyzeSyntaxRequest'] = _ANALYZESYNTAXREQUEST +DESCRIPTOR.message_types_by_name['AnalyzeSyntaxResponse'] = _ANALYZESYNTAXRESPONSE +DESCRIPTOR.message_types_by_name['AnnotateTextRequest'] = _ANNOTATETEXTREQUEST +DESCRIPTOR.message_types_by_name['AnnotateTextResponse'] = _ANNOTATETEXTRESPONSE +DESCRIPTOR.enum_types_by_name['EncodingType'] = _ENCODINGTYPE + +Document = _reflection.GeneratedProtocolMessageType('Document', (_message.Message,), dict( + DESCRIPTOR = _DOCUMENT, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents the input to API methods. + + + Attributes: + type: + Required. If the type is not set or is ``TYPE_UNSPECIFIED``, + returns an ``INVALID_ARGUMENT`` error. + source: + The source of the document: a string containing the content or + a Google Cloud Storage URI. + content: + The content of the input in string format. 
+ gcs_content_uri: + The Google Cloud Storage URI where the file content is + located. This URI must be of the form: + gs://bucket\_name/object\_name. For more details, see + https://cloud.google.com/storage/docs/reference-uris. NOTE: + Cloud Storage object versioning is not supported. + language: + The language of the document (if not specified, the language + is automatically detected). Both ISO and BCP-47 language codes + are accepted. `Language Support + <https://cloud.google.com/natural-language/docs/languages>`__ + lists currently supported languages for each API method. If + the language (either specified by the caller or automatically + detected) is not supported by the called API method, an + ``INVALID_ARGUMENT`` error is returned. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Document) + )) +_sym_db.RegisterMessage(Document) + +Sentence = _reflection.GeneratedProtocolMessageType('Sentence', (_message.Message,), dict( + DESCRIPTOR = _SENTENCE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents a sentence in the input document. + + + Attributes: + text: + The sentence text. + sentiment: + For calls to [AnalyzeSentiment][] or if [AnnotateTextRequest.F + eatures.extract\_document\_sentiment][google.cloud.language.v1 + beta2.AnnotateTextRequest.Features.extract\_document\_sentimen + t] is set to true, this field will contain the sentiment for + the sentence. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Sentence) + )) +_sym_db.RegisterMessage(Sentence) + +Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), dict( + + MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict( + DESCRIPTOR = _ENTITY_METADATAENTRY, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Entity.MetadataEntry) + )) + , + DESCRIPTOR = _ENTITY, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents a phrase in the text that is a known entity, such as a + person, an organization, or location. The API associates information, + such as salience and mentions, with entities. + + + Attributes: + name: + The representative name for the entity. + type: + The entity type. + metadata: + Metadata associated with the entity. Currently, Wikipedia + URLs and Knowledge Graph MIDs are provided, if available. The + associated keys are "wikipedia\_url" and "mid", respectively. + salience: + The salience score associated with the entity in the [0, 1.0] + range. The salience score for an entity provides information + about the importance or centrality of that entity to the + entire document text. Scores closer to 0 are less salient, + while scores closer to 1.0 are highly salient. + mentions: + The mentions of this entity in the input document. The API + currently supports proper noun mentions. + sentiment: + For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq + uest.Features.extract\_entity\_sentiment][google.cloud.languag + e.v1beta2.AnnotateTextRequest.Features.extract\_entity\_sentim + ent] is set to true, this field will contain the aggregate + sentiment expressed for this entity in the provided document. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Entity) + )) +_sym_db.RegisterMessage(Entity) +_sym_db.RegisterMessage(Entity.MetadataEntry) + +Token = _reflection.GeneratedProtocolMessageType('Token', (_message.Message,), dict( + DESCRIPTOR = _TOKEN, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents the smallest syntactic building block of the text. + + + Attributes: + text: + The token text. + part_of_speech: + Parts of speech tag for this token. + dependency_edge: + Dependency tree parse for this token. + lemma: + `Lemma + <https://en.wikipedia.org/wiki/Lemma_%28morphology%29>`__ of + the token. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Token) + )) +_sym_db.RegisterMessage(Token) + +Sentiment = _reflection.GeneratedProtocolMessageType('Sentiment', (_message.Message,), dict( + DESCRIPTOR = _SENTIMENT, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents the feeling associated with the entire text or entities in + the text. + + + Attributes: + magnitude: + A non-negative number in the [0, +inf) range, which represents + the absolute magnitude of sentiment regardless of score + (positive or negative). + score: + Sentiment score between -1.0 (negative sentiment) and 1.0 + (positive sentiment). + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Sentiment) + )) +_sym_db.RegisterMessage(Sentiment) + +PartOfSpeech = _reflection.GeneratedProtocolMessageType('PartOfSpeech', (_message.Message,), dict( + DESCRIPTOR = _PARTOFSPEECH, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents part of speech information for a token. + + + Attributes: + tag: + The part of speech tag. + aspect: + The grammatical aspect. + case: + The grammatical case. + form: + The grammatical form. + gender: + The grammatical gender. + mood: + The grammatical mood. + number: + The grammatical number. + person: + The grammatical person. + proper: + The grammatical properness. + reciprocity: + The grammatical reciprocity. + tense: + The grammatical tense. + voice: + The grammatical voice. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.PartOfSpeech) + )) +_sym_db.RegisterMessage(PartOfSpeech) + +DependencyEdge = _reflection.GeneratedProtocolMessageType('DependencyEdge', (_message.Message,), dict( + DESCRIPTOR = _DEPENDENCYEDGE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents dependency parse tree information for a token. + + + Attributes: + head_token_index: + Represents the head of this token in the dependency tree. This + is the index of the token which has an arc going to this + token. The index is the position of the token in the array of + tokens returned by the API method. If this token is a root + token, then the ``head_token_index`` is its own index. + label: + The parse label for the token. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.DependencyEdge) + )) +_sym_db.RegisterMessage(DependencyEdge) + +EntityMention = _reflection.GeneratedProtocolMessageType('EntityMention', (_message.Message,), dict( + DESCRIPTOR = _ENTITYMENTION, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents a mention for an entity in the text. Currently, proper noun + mentions are supported. 
+ + + Attributes: + text: + The mention text. + type: + The type of the entity mention. + sentiment: + For calls to [AnalyzeEntitySentiment][] or if [AnnotateTextReq + uest.Features.extract\_entity\_sentiment][google.cloud.languag + e.v1beta2.AnnotateTextRequest.Features.extract\_entity\_sentim + ent] is set to true, this field will contain the sentiment + expressed for this mention of the entity in the provided + document. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.EntityMention) + )) +_sym_db.RegisterMessage(EntityMention) + +TextSpan = _reflection.GeneratedProtocolMessageType('TextSpan', (_message.Message,), dict( + DESCRIPTOR = _TEXTSPAN, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """Represents an output piece of text. + + + Attributes: + content: + The content of the output text. + begin_offset: + The API calculates the beginning offset of the content in the + original document according to the + [EncodingType][google.cloud.language.v1beta2.EncodingType] + specified in the API request. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.TextSpan) + )) +_sym_db.RegisterMessage(TextSpan) + +AnalyzeSentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESENTIMENTREQUEST, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The sentiment analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate sentence + offsets for the sentence sentiment. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSentimentRequest) + )) +_sym_db.RegisterMessage(AnalyzeSentimentRequest) + +AnalyzeSentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSentimentResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESENTIMENTRESPONSE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The sentiment analysis response message. + + + Attributes: + document_sentiment: + The overall sentiment of the input document. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1beta2.Document.language] field for more + details. + sentences: + The sentiment for all the sentences in the document. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSentimentResponse) + )) +_sym_db.RegisterMessage(AnalyzeSentimentResponse) + +AnalyzeEntitySentimentRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitySentimentRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITYSENTIMENTREQUEST, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The entity-level sentiment analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate offsets. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest) + )) +_sym_db.RegisterMessage(AnalyzeEntitySentimentRequest) + +AnalyzeEntitySentimentResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitySentimentResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITYSENTIMENTRESPONSE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The entity-level sentiment analysis response message. + + + Attributes: + entities: + The recognized entities in the input document with associated + sentiments. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1beta2.Document.language] field for more + details. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse) + )) +_sym_db.RegisterMessage(AnalyzeEntitySentimentResponse) + +AnalyzeEntitiesRequest = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITIESREQUEST, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The entity analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitiesRequest) + )) +_sym_db.RegisterMessage(AnalyzeEntitiesRequest) + +AnalyzeEntitiesResponse = _reflection.GeneratedProtocolMessageType('AnalyzeEntitiesResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZEENTITIESRESPONSE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The entity analysis response message. + + + Attributes: + entities: + The recognized entities in the input document. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1beta2.Document.language] field for more + details. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeEntitiesResponse) + )) +_sym_db.RegisterMessage(AnalyzeEntitiesResponse) + +AnalyzeSyntaxRequest = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxRequest', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESYNTAXREQUEST, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The syntax analysis request message. + + + Attributes: + document: + Input document. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSyntaxRequest) + )) +_sym_db.RegisterMessage(AnalyzeSyntaxRequest) + +AnalyzeSyntaxResponse = _reflection.GeneratedProtocolMessageType('AnalyzeSyntaxResponse', (_message.Message,), dict( + DESCRIPTOR = _ANALYZESYNTAXRESPONSE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The syntax analysis response message. + + + Attributes: + sentences: + Sentences in the input document. + tokens: + Tokens, along with their syntactic information, in the input + document. 
+ language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1beta2.Document.language] field for more + details. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnalyzeSyntaxResponse) + )) +_sym_db.RegisterMessage(AnalyzeSyntaxResponse) + +AnnotateTextRequest = _reflection.GeneratedProtocolMessageType('AnnotateTextRequest', (_message.Message,), dict( + + Features = _reflection.GeneratedProtocolMessageType('Features', (_message.Message,), dict( + DESCRIPTOR = _ANNOTATETEXTREQUEST_FEATURES, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """All available features for sentiment, syntax, and semantic analysis. + Setting each one to true will enable that specific analysis for the + input. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextRequest.Features) + )) + , + DESCRIPTOR = _ANNOTATETEXTREQUEST, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The request message for the text annotation API, which can perform + multiple analysis types (sentiment, entities, and syntax) in one call. + + + Attributes: + extract_syntax: + Extract syntax information. + extract_entities: + Extract entities. + extract_document_sentiment: + Extract document-level sentiment. + extract_entity_sentiment: + Extract entities and their associated sentiment. + document: + Input document. + features: + The enabled features. + encoding_type: + The encoding type used by the API to calculate offsets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextRequest) + )) +_sym_db.RegisterMessage(AnnotateTextRequest) +_sym_db.RegisterMessage(AnnotateTextRequest.Features) + +AnnotateTextResponse = _reflection.GeneratedProtocolMessageType('AnnotateTextResponse', (_message.Message,), dict( + DESCRIPTOR = _ANNOTATETEXTRESPONSE, + __module__ = 'google.cloud.proto.language.v1beta2.language_service_pb2' + , + __doc__ = """The text annotations response message. + + + Attributes: + sentences: + Sentences in the input document. Populated if the user enables + [AnnotateTextRequest.Features.extract\_syntax][google.cloud.la + nguage.v1beta2.AnnotateTextRequest.Features.extract\_syntax]. + tokens: + Tokens, along with their syntactic information, in the input + document. Populated if the user enables [AnnotateTextRequest.F + eatures.extract\_syntax][google.cloud.language.v1beta2.Annotat + eTextRequest.Features.extract\_syntax]. + entities: + Entities, along with their semantic information, in the input + document. Populated if the user enables [AnnotateTextRequest.F + eatures.extract\_entities][google.cloud.language.v1beta2.Annot + ateTextRequest.Features.extract\_entities]. + document_sentiment: + The overall sentiment for the document. Populated if the user + enables [AnnotateTextRequest.Features.extract\_document\_senti + ment][google.cloud.language.v1beta2.AnnotateTextRequest.Featur + es.extract\_document\_sentiment]. + language: + The language of the text, which will be the same as the + language specified in the request or, if not specified, the + automatically-detected language. See [Document.language][googl + e.cloud.language.v1beta2.Document.language] field for more + details. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextResponse) + )) +_sym_db.RegisterMessage(AnnotateTextResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n!com.google.cloud.language.v1beta2B\024LanguageServiceProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;language')) +_ENTITY_METADATAENTRY.has_options = True +_ENTITY_METADATAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=AnalyzeSentimentRequest.SerializeToString, + response_deserializer=AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeEntitySentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=AnalyzeEntitySentimentRequest.SerializeToString, + response_deserializer=AnalyzeEntitySentimentResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=AnnotateTextRequest.SerializeToString, + response_deserializer=AnnotateTextResponse.FromString, + ) + + + class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all syntax, sentiment, and entity + features in one call. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=AnalyzeSentimentRequest.FromString, + response_serializer=AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=AnalyzeEntitiesRequest.FromString, + response_serializer=AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeEntitySentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntitySentiment, + request_deserializer=AnalyzeEntitySentimentRequest.FromString, + response_serializer=AnalyzeEntitySentimentResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=AnalyzeSyntaxRequest.FromString, + response_serializer=AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=AnnotateTextRequest.FromString, + response_serializer=AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1beta2.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaLanguageServiceServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def AnnotateText(self, request, context): + """A convenience method that provides all syntax, sentiment, and entity + features in one call. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaLanguageServiceStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + def AnalyzeSentiment(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Analyzes the sentiment of the provided text. + """ + raise NotImplementedError() + AnalyzeSentiment.future = None + def AnalyzeEntities(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + raise NotImplementedError() + AnalyzeEntities.future = None + def AnalyzeEntitySentiment(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. + """ + raise NotImplementedError() + AnalyzeEntitySentiment.future = None + def AnalyzeSyntax(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. + """ + raise NotImplementedError() + AnalyzeSyntax.future = None + def AnnotateText(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """A convenience method that provides all syntax, sentiment, and entity + features in one call. + """ + raise NotImplementedError() + AnnotateText.future = None + + + def beta_create_LanguageService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextRequest.FromString, + } + response_serializers = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextResponse.SerializeToString, + } + method_implementations = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): face_utilities.unary_unary_inline(servicer.AnalyzeEntities), + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeEntitySentiment), + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): face_utilities.unary_unary_inline(servicer.AnalyzeSentiment), + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): face_utilities.unary_unary_inline(servicer.AnalyzeSyntax), + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): face_utilities.unary_unary_inline(servicer.AnnotateText), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_LanguageService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxRequest.SerializeToString, + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextRequest.SerializeToString, + } + response_deserializers = { + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntities'): AnalyzeEntitiesResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeEntitySentiment'): AnalyzeEntitySentimentResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSentiment'): AnalyzeSentimentResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnalyzeSyntax'): AnalyzeSyntaxResponse.FromString, + ('google.cloud.language.v1beta2.LanguageService', 'AnnotateText'): AnnotateTextResponse.FromString, + } + cardinalities = { + 'AnalyzeEntities': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeEntitySentiment': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeSentiment': cardinality.Cardinality.UNARY_UNARY, + 'AnalyzeSyntax': cardinality.Cardinality.UNARY_UNARY, + 'AnnotateText': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.cloud.language.v1beta2.LanguageService', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py b/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py new file mode 100644 index 000000000000..264d6d43f468 --- /dev/null +++ b/language/google/cloud/proto/language/v1beta2/language_service_pb2_grpc.py @@ -0,0 +1,122 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.language.v1beta2.language_service_pb2 as google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2 + + +class LanguageServiceStub(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.AnalyzeSentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSentiment', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentResponse.FromString, + ) + self.AnalyzeEntities = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntities', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesResponse.FromString, + ) + self.AnalyzeEntitySentiment = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeEntitySentiment', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentResponse.FromString, + ) + self.AnalyzeSyntax = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnalyzeSyntax', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxResponse.FromString, + ) + self.AnnotateText = channel.unary_unary( + '/google.cloud.language.v1beta2.LanguageService/AnnotateText', + request_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextResponse.FromString, + ) + + +class LanguageServiceServicer(object): + """Provides text analysis operations such as sentiment analysis and entity + recognition. + """ + + def AnalyzeSentiment(self, request, context): + """Analyzes the sentiment of the provided text. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntities(self, request, context): + """Finds named entities (currently proper names and common nouns) in the text + along with entity types, salience, mentions for each entity, and + other properties. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeEntitySentiment(self, request, context): + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnalyzeSyntax(self, request, context): + """Analyzes the syntax of the text and provides sentence boundaries and + tokenization along with part of speech tags, dependency trees, and other + properties. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AnnotateText(self, request, context): + """A convenience method that provides all syntax, sentiment, and entity + features in one call. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_LanguageServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'AnalyzeSentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSentiment, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSentimentResponse.SerializeToString, + ), + 'AnalyzeEntities': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntities, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitiesResponse.SerializeToString, + ), + 'AnalyzeEntitySentiment': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeEntitySentiment, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeEntitySentimentResponse.SerializeToString, + ), + 'AnalyzeSyntax': grpc.unary_unary_rpc_method_handler( + servicer.AnalyzeSyntax, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnalyzeSyntaxResponse.SerializeToString, + ), + 'AnnotateText': grpc.unary_unary_rpc_method_handler( + servicer.AnnotateText, + request_deserializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_language_dot_v1beta2_dot_language__service__pb2.AnnotateTextResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.language.v1beta2.LanguageService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/language/setup.py b/language/setup.py index d573938a7665..16ee4d5603ad 100644 --- a/language/setup.py +++ b/language/setup.py @@ -52,7 +52,12 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.25.0, < 0.26dev', + 'google-gax >= 0.15.13, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', ] +EXTRAS_REQUIRE = { + ':python_version<"3.4"': ['enum34'], +} setup( name='google-cloud-language', @@ -62,8 +67,13 @@ namespace_packages=[ 'google', 'google.cloud', + 'google.cloud.gapic', + 'google.cloud.gapic.language', + 'google.cloud.proto', + 'google.cloud.proto.language', ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, + extras_require=EXTRAS_REQUIRE, **SETUP_BASE ) diff --git a/language/tests/gapic/v1/language_service_smoke_test.py b/language/tests/gapic/v1/language_service_smoke_test.py new file mode 100644 index 000000000000..67839505c670 --- /dev/null +++ 
b/language/tests/gapic/v1/language_service_smoke_test.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import unittest + +from google.cloud.gapic.language.v1 import enums +from google.cloud.gapic.language.v1 import language_service_client +from google.cloud.proto.language.v1 import language_service_pb2 + + +class LanguageServiceSmokeTest(unittest.TestCase): + def test_analyze_sentiment(self): + + client = language_service_client.LanguageServiceClient() + content = 'Hello, world!' + type_ = enums.Document.Type.PLAIN_TEXT + document = language_service_pb2.Document(content=content, type=type_) + response = client.analyze_sentiment(document) diff --git a/language/tests/gapic/v1/test_language_service_client_v1.py b/language/tests/gapic/v1/test_language_service_client_v1.py new file mode 100644 index 000000000000..a0b1931727ce --- /dev/null +++ b/language/tests/gapic/v1/test_language_service_client_v1.py @@ -0,0 +1,232 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
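+
+# NOTE: every test below follows the same transport-mocking pattern:
+# patch ``google.gax.config.create_stub`` so the GAPIC client wires its
+# RPC methods to a ``mock.Mock()`` instead of a live gRPC channel, call
+# a client method, then assert on the request proto the client built.
+# A minimal sketch of that pattern (the response values are arbitrary
+# placeholders):
+#
+#     with mock.patch('google.gax.config.create_stub', spec=True) as new_stub:
+#         grpc_stub = mock.Mock()
+#         new_stub.return_value = grpc_stub
+#         client = language_service_client.LanguageServiceClient()
+#         grpc_stub.AnalyzeSentiment.return_value = (
+#             language_service_pb2.AnalyzeSentimentResponse(language='en'))
+#         document = language_service_pb2.Document(
+#             content='Hello, world!', type=enums.Document.Type.PLAIN_TEXT)
+#         response = client.analyze_sentiment(document)
+#         request = grpc_stub.AnalyzeSentiment.call_args[0][0]
+#         assert request == language_service_pb2.AnalyzeSentimentRequest(
+#             document=document)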
+"""Unit tests.""" + +import mock +import unittest + +from google.gax import errors + +from google.cloud.gapic.language.v1 import enums +from google.cloud.gapic.language.v1 import language_service_client +from google.cloud.proto.language.v1 import language_service_pb2 + + +class CustomException(Exception): + pass + + +class TestLanguageServiceClient(unittest.TestCase): + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_sentiment(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeSentimentResponse( + language=language) + grpc_stub.AnalyzeSentiment.return_value = expected_response + + response = client.analyze_sentiment(document) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeSentiment.assert_called_once() + args, kwargs = grpc_stub.AnalyzeSentiment.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeSentimentRequest( + document=document) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_sentiment_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + + # Mock exception response + grpc_stub.AnalyzeSentiment.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_sentiment, document) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entities(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeEntitiesResponse( + language=language) + grpc_stub.AnalyzeEntities.return_value = expected_response + + response = client.analyze_entities(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeEntities.assert_called_once() + args, kwargs = grpc_stub.AnalyzeEntities.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeEntitiesRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entities_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + 
grpc_stub.AnalyzeEntities.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_entities, document, + encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_syntax(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeSyntaxResponse( + language=language) + grpc_stub.AnalyzeSyntax.return_value = expected_response + + response = client.analyze_syntax(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeSyntax.assert_called_once() + args, kwargs = grpc_stub.AnalyzeSyntax.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeSyntaxRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_syntax_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + grpc_stub.AnalyzeSyntax.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_syntax, document, + encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_annotate_text(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + features = language_service_pb2.AnnotateTextRequest.Features() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnnotateTextResponse( + language=language) + grpc_stub.AnnotateText.return_value = expected_response + + response = client.annotate_text(document, features, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnnotateText.assert_called_once() + args, kwargs = grpc_stub.AnnotateText.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnnotateTextRequest( + document=document, features=features, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_annotate_text_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + features = language_service_pb2.AnnotateTextRequest.Features() + encoding_type = enums.EncodingType.NONE + + # Mock exception response 
+ grpc_stub.AnnotateText.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.annotate_text, document, + features, encoding_type) diff --git a/language/tests/gapic/v1beta2/language_service_smoke_test.py b/language/tests/gapic/v1beta2/language_service_smoke_test.py new file mode 100644 index 000000000000..d94531f88f75 --- /dev/null +++ b/language/tests/gapic/v1beta2/language_service_smoke_test.py @@ -0,0 +1,30 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import unittest + +from google.cloud.gapic.language.v1beta2 import enums +from google.cloud.gapic.language.v1beta2 import language_service_client +from google.cloud.proto.language.v1beta2 import language_service_pb2 + + +class LanguageServiceSmokeTest(unittest.TestCase): + def test_analyze_sentiment(self): + + client = language_service_client.LanguageServiceClient() + content = 'Hello, world!' + type_ = enums.Document.Type.PLAIN_TEXT + document = language_service_pb2.Document(content=content, type=type_) + response = client.analyze_sentiment(document) diff --git a/language/tests/gapic/v1beta2/test_language_service_client_v1beta2.py b/language/tests/gapic/v1beta2/test_language_service_client_v1beta2.py new file mode 100644 index 000000000000..fea1c572d4ce --- /dev/null +++ b/language/tests/gapic/v1beta2/test_language_service_client_v1beta2.py @@ -0,0 +1,283 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
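+
+# NOTE: these v1beta2 tests mirror the v1 suite above, with added
+# coverage for the entity-level sentiment RPC. Under the same mocked
+# transport, a sketch of that extra call (encoding choice and printed
+# fields are illustrative only):
+#
+#     encoding_type = enums.EncodingType.UTF8
+#     response = client.analyze_entity_sentiment(document, encoding_type)
+#     for entity in response.entities:
+#         print(entity.name, entity.sentiment.score,
+#               entity.sentiment.magnitude)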
+"""Unit tests.""" + +import mock +import unittest + +from google.gax import errors + +from google.cloud.gapic.language.v1beta2 import enums +from google.cloud.gapic.language.v1beta2 import language_service_client +from google.cloud.proto.language.v1beta2 import language_service_pb2 + + +class CustomException(Exception): + pass + + +class TestLanguageServiceClient(unittest.TestCase): + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_sentiment(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeSentimentResponse( + language=language) + grpc_stub.AnalyzeSentiment.return_value = expected_response + + response = client.analyze_sentiment(document) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeSentiment.assert_called_once() + args, kwargs = grpc_stub.AnalyzeSentiment.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeSentimentRequest( + document=document) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_sentiment_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + + # Mock exception response + grpc_stub.AnalyzeSentiment.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_sentiment, document) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entities(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeEntitiesResponse( + language=language) + grpc_stub.AnalyzeEntities.return_value = expected_response + + response = client.analyze_entities(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeEntities.assert_called_once() + args, kwargs = grpc_stub.AnalyzeEntities.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeEntitiesRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entities_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + 
grpc_stub.AnalyzeEntities.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_entities, document, + encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entity_sentiment(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeEntitySentimentResponse( + language=language) + grpc_stub.AnalyzeEntitySentiment.return_value = expected_response + + response = client.analyze_entity_sentiment(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeEntitySentiment.assert_called_once() + args, kwargs = grpc_stub.AnalyzeEntitySentiment.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeEntitySentimentRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_entity_sentiment_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + grpc_stub.AnalyzeEntitySentiment.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.analyze_entity_sentiment, + document, encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_syntax(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnalyzeSyntaxResponse( + language=language) + grpc_stub.AnalyzeSyntax.return_value = expected_response + + response = client.analyze_syntax(document, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnalyzeSyntax.assert_called_once() + args, kwargs = grpc_stub.AnalyzeSyntax.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnalyzeSyntaxRequest( + document=document, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_analyze_syntax_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + grpc_stub.AnalyzeSyntax.side_effect = 
CustomException() + + self.assertRaises(errors.GaxError, client.analyze_syntax, document, + encoding_type) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_annotate_text(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + features = language_service_pb2.AnnotateTextRequest.Features() + encoding_type = enums.EncodingType.NONE + + # Mock response + language = 'language-1613589672' + expected_response = language_service_pb2.AnnotateTextResponse( + language=language) + grpc_stub.AnnotateText.return_value = expected_response + + response = client.annotate_text(document, features, encoding_type) + self.assertEqual(expected_response, response) + + grpc_stub.AnnotateText.assert_called_once() + args, kwargs = grpc_stub.AnnotateText.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = language_service_pb2.AnnotateTextRequest( + document=document, features=features, encoding_type=encoding_type) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_annotate_text_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = language_service_client.LanguageServiceClient() + + # Mock request + document = language_service_pb2.Document() + features = language_service_pb2.AnnotateTextRequest.Features() + encoding_type = enums.EncodingType.NONE + + # Mock exception response + grpc_stub.AnnotateText.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.annotate_text, document, + features, encoding_type) diff --git a/vision/setup.py b/vision/setup.py index aeabefeb86c1..3055a5130cff 100644 --- a/vision/setup.py +++ b/vision/setup.py @@ -26,7 +26,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.25.0, < 0.26dev', - 'google-gax >= 0.15.7, < 0.16dev', + 'google-gax >= 0.15.13, < 0.16dev', 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', ] EXTRAS_REQUIRE = { From 912f72e499e743a4d91c4ff4c3122f7e873b4894 Mon Sep 17 00:00:00 2001 From: Danny Hermes <daniel.j.hermes@gmail.com> Date: Wed, 26 Jul 2017 13:06:39 -0700 Subject: [PATCH 57/62] Cut release of vision API. (#3677) Also bumping the version on the uber-package. 
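
For downstream consumers, the new release resolves under the range-style
pin this repository already uses elsewhere (shown for illustration):

    pip install 'google-cloud-vision >= 0.25.1, < 0.26dev'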
--- vision/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vision/setup.py b/vision/setup.py index 3055a5130cff..ad485c0e8642 100644 --- a/vision/setup.py +++ b/vision/setup.py @@ -37,7 +37,7 @@ author='Google Cloud Platform', author_email='googleapis-publisher@google.com', name='google-cloud-vision', - version='0.25.0', + version='0.25.1', description='Python Client for Google Cloud Vision', long_description=readme, namespace_packages=[ From 5f6d6b3b7bcdf74f0382434d0dad96bd8ff2bfee Mon Sep 17 00:00:00 2001 From: Angela Li <yanhuil@google.com> Date: Wed, 26 Jul 2017 13:51:56 -0700 Subject: [PATCH 58/62] Auto-generated trace library (GAPIC only) (#3512) --- docs/trace/apis.rst | 19 + docs/trace/conf.py | 311 +++++++ docs/trace/index.rst | 41 + docs/trace/starting.rst | 78 ++ nox.py | 2 +- trace/.coveragerc | 11 + trace/LICENSE | 201 +++++ trace/MANIFEST.in | 7 + trace/PUBLISHING.rst | 46 ++ trace/README.rst | 97 +++ trace/google/__init__.py | 1 + trace/google/cloud/__init__.py | 1 + trace/google/cloud/gapic/__init__.py | 1 + trace/google/cloud/gapic/trace/__init__.py | 1 + trace/google/cloud/gapic/trace/v1/__init__.py | 0 trace/google/cloud/gapic/trace/v1/enums.py | 53 ++ .../gapic/trace/v1/trace_service_client.py | 310 +++++++ .../trace/v1/trace_service_client_config.json | 43 + trace/google/cloud/proto/__init__.py | 1 + trace/google/cloud/proto/devtools/__init__.py | 1 + .../proto/devtools/cloudtrace/__init__.py | 1 + .../proto/devtools/cloudtrace/v1/__init__.py | 1 + .../proto/devtools/cloudtrace/v1/trace_pb2.py | 765 ++++++++++++++++++ .../devtools/cloudtrace/v1/trace_pb2_grpc.py | 93 +++ trace/google/cloud/trace.py | 24 + trace/google/cloud/trace/__init__.py | 18 + trace/google/cloud/trace/_gax.py | 213 +++++ trace/google/cloud/trace/client.py | 167 ++++ trace/google/cloud/trace_v1/__init__.py | 25 + trace/google/cloud/trace_v1/types.py | 28 + trace/nox.py | 79 ++ trace/setup.py | 44 + trace/tests/__init__.py | 13 + .../gapic/v1/test_trace_service_client_v1.py | 177 ++++ trace/tests/unit/test__gax.py | 429 ++++++++++ trace/tests/unit/test_client.py | 252 ++++++ 36 files changed, 3553 insertions(+), 1 deletion(-) create mode 100644 docs/trace/apis.rst create mode 100644 docs/trace/conf.py create mode 100644 docs/trace/index.rst create mode 100644 docs/trace/starting.rst create mode 100644 trace/.coveragerc create mode 100644 trace/LICENSE create mode 100644 trace/MANIFEST.in create mode 100644 trace/PUBLISHING.rst create mode 100644 trace/README.rst create mode 100644 trace/google/__init__.py create mode 100644 trace/google/cloud/__init__.py create mode 100644 trace/google/cloud/gapic/__init__.py create mode 100644 trace/google/cloud/gapic/trace/__init__.py create mode 100644 trace/google/cloud/gapic/trace/v1/__init__.py create mode 100644 trace/google/cloud/gapic/trace/v1/enums.py create mode 100644 trace/google/cloud/gapic/trace/v1/trace_service_client.py create mode 100644 trace/google/cloud/gapic/trace/v1/trace_service_client_config.json create mode 100644 trace/google/cloud/proto/__init__.py create mode 100644 trace/google/cloud/proto/devtools/__init__.py create mode 100644 trace/google/cloud/proto/devtools/cloudtrace/__init__.py create mode 100644 trace/google/cloud/proto/devtools/cloudtrace/v1/__init__.py create mode 100644 trace/google/cloud/proto/devtools/cloudtrace/v1/trace_pb2.py create mode 100644 trace/google/cloud/proto/devtools/cloudtrace/v1/trace_pb2_grpc.py create mode 100644 trace/google/cloud/trace.py create mode 100644 
trace/google/cloud/trace/__init__.py create mode 100644 trace/google/cloud/trace/_gax.py create mode 100644 trace/google/cloud/trace/client.py create mode 100644 trace/google/cloud/trace_v1/__init__.py create mode 100644 trace/google/cloud/trace_v1/types.py create mode 100644 trace/nox.py create mode 100644 trace/setup.py create mode 100644 trace/tests/__init__.py create mode 100644 trace/tests/gapic/v1/test_trace_service_client_v1.py create mode 100644 trace/tests/unit/test__gax.py create mode 100644 trace/tests/unit/test_client.py diff --git a/docs/trace/apis.rst b/docs/trace/apis.rst new file mode 100644 index 000000000000..80a8d50c0c60 --- /dev/null +++ b/docs/trace/apis.rst @@ -0,0 +1,19 @@ +API Reference +============= + +APIs +---- + +.. autosummary:: + :toctree: + + google.cloud.gapic.trace.v1.trace_service_client + + +API types +~~~~~~~~~ + +.. autosummary:: + :toctree: + + google.cloud.gapic.trace.v1.enums diff --git a/docs/trace/conf.py b/docs/trace/conf.py new file mode 100644 index 000000000000..5eead079b01c --- /dev/null +++ b/docs/trace/conf.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-trace documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +__version__ = '0.15.4' + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +# autodoc/autosummary flags +autoclass_content = 'both' +autodoc_default_flags = ['members'] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'google-cloud-trace' +copyright = u'2017, Google' +author = u'Google APIs' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = '.'.join(release.split('.')[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. 
+#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'google-cloud-trace-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'google-cloud-trace.tex', + u'google-cloud-trace Documentation', author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'google-cloud-trace', + u'google-cloud-trace Documentation', [author], 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'google-cloud-trace', + u'google-cloud-trace Documentation', author, + 'google-cloud-trace', + 'GAPIC library for the {metadata.shortName} v1 service', 'APIs'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'python': ('http://python.readthedocs.org/en/latest/', None), + 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), +} + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/docs/trace/index.rst b/docs/trace/index.rst new file mode 100644 index 000000000000..08044709bcc6 --- /dev/null +++ b/docs/trace/index.rst @@ -0,0 +1,41 @@ +.. gapic-google-cloud-trace-v1 sphinx documentation master file + + +GAPIC library for the Stackdriver Trace API +============================================================================================================= + +This is the API documentation for ``gapic-google-cloud-trace-v1``. + +gapic-google-cloud-trace-v1 uses google-gax_ (Google API extensions) to provide an +easy-to-use client library for the `Stackdriver Trace API`_ (v1) defined in the googleapis_ git repository + + +.. _`google-gax`: https://github.com/googleapis/gax-python +.. _`googleapis`: https://github.com/googleapis/googleapis/tree/master/google/devtools/cloudtrace/v1 +.. _`Stackdriver Trace API`: https://developers.google.com/apis-explorer/?hl=en_US#p/cloudtrace/v1/ + + +APIs +---- + +.. autosummary:: + + google.cloud.gapic.trace.v1.trace_service_client + + +Contents +-------- + +.. toctree:: + + self + starting + apis + + +Indices and tables +------------------ + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/trace/starting.rst b/docs/trace/starting.rst new file mode 100644 index 000000000000..245fcfd68a87 --- /dev/null +++ b/docs/trace/starting.rst @@ -0,0 +1,78 @@ +Getting started +=============== + +gapic-google-cloud-trace-v1 will allow you to connect to the `Stackdriver Trace API`_ and access all its methods. In order to achieve this, you need to set up authentication as well as install the library locally. + +.. _`Stackdriver Trace API`: https://developers.google.com/apis-explorer/?hl=en_US#p/cloudtrace/v1/ + + +Installation +------------ + + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +~~~~~~~~~~ + +.. 
code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install gapic-google-cloud-trace-v1
+
+Windows
+~~~~~~~
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install gapic-google-cloud-trace-v1
+
+
+Using the API
+-------------
+
+
+Authentication
+~~~~~~~~~~~~~~
+
+To authenticate all your API calls, first install and set up the `Google Cloud SDK`_.
+Once done, you can then run the following command in your terminal:
+
+.. code-block:: console
+
+    $ gcloud beta auth application-default login
+
+or
+
+.. code-block:: console
+
+    $ gcloud auth login
+
+Please see the `gcloud beta auth application-default login`_ documentation for the difference between these commands.
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
+.. _gcloud beta auth application-default login: https://cloud.google.com/sdk/gcloud/reference/beta/auth/application-default/login
+
+
+At this point you are all set to continue.
+
+
+Examples
+~~~~~~~~
+
+To see example usage, please read through the :doc:`API reference </apis>`. The
+documentation for each API method includes simple examples.
diff --git a/nox.py b/nox.py
index 3d283c821bdc..25db4c616c4f 100644
--- a/nox.py
+++ b/nox.py
@@ -34,7 +34,7 @@ def docs(session):
         'core/', 'bigquery/', 'bigtable/', 'datastore/', 'dns/',
         'language/', 'logging/', 'error_reporting/', 'monitoring/',
         'pubsub/', 'resource_manager/', 'runtimeconfig/', 'spanner/', 'speech/',
-        'storage/', 'translate/', 'vision/',
+        'storage/', 'trace/', 'translate/', 'vision/',
     )
 
     session.install('-e', '.')
diff --git a/trace/.coveragerc b/trace/.coveragerc
new file mode 100644
index 000000000000..a54b99aa14b7
--- /dev/null
+++ b/trace/.coveragerc
@@ -0,0 +1,11 @@
+[run]
+branch = True
+
+[report]
+fail_under = 100
+show_missing = True
+exclude_lines =
+    # Re-enable the standard pragma
+    pragma: NO COVER
+    # Ignore debug-only repr
+    def __repr__
diff --git a/trace/LICENSE b/trace/LICENSE
new file mode 100644
index 000000000000..724a8807144b
--- /dev/null
+++ b/trace/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/trace/MANIFEST.in b/trace/MANIFEST.in
new file mode 100644
index 000000000000..8a2c2aa5ab99
--- /dev/null
+++ b/trace/MANIFEST.in
@@ -0,0 +1,7 @@
+include README.rst LICENSE
+recursive-include tests *
+global-include google *.json *.proto
+graft google
+global-exclude *.py[co]
+global-exclude __pycache__
+prune .tox
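The ``graft google`` and ``global-include google *.json *.proto`` directives in MANIFEST.in are what carry the GAPIC client config JSON (added later in this patch) into source distributions. A hedged way to spot-check a locally built sdist; the archive name is an assumption based on the ``0.15.4`` version used in the docs config above:

.. code:: py

    import tarfile

    # Archive name is an assumption; use whatever `python setup.py sdist`
    # actually wrote under dist/.
    with tarfile.open('dist/google-cloud-trace-0.15.4.tar.gz') as sdist:
        names = sdist.getnames()
        # The grafted/force-included config and proto files should appear.
        print([n for n in names if n.endswith(('.json', '.proto'))])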
diff --git a/trace/PUBLISHING.rst b/trace/PUBLISHING.rst
new file mode 100644
index 000000000000..a6d81225248b
--- /dev/null
+++ b/trace/PUBLISHING.rst
@@ -0,0 +1,46 @@
+PUBLISHING
+----------
+
+Note: This folder has been generated by the GAPIC code generator.
+
+These instructions assume that no changes have been made to the folder and
+its contents since it was created.
+
+PREREQUISITES
+-------------
+
+- Python must be installed
+- `tox <https://testrun.org/tox/latest/>`_ must be installed
+
+
+TO PUBLISH
+----------
+
+- Make sure you have `an account`_ on pypi_.
+- Publish your package using tox.
+- *tox must be used here or the uploaded package will be invalid!!*
+
+  ::
+
+     tox -e upload-package
+
+
+TO PUBLISH THE DOCS
+-------------------
+
+- Create the docs
+
+  ::
+
+     tox -e docs
+
+- Publish them to pythonhosted.org
+
+  ::
+
+     tox -e upload-docs
+
+
+.. _Packaging and Distributing projects: https://packaging.python.org/en/latest/distributing.html#uploading-your-project-to-pypi
+.. _an account: https://pypi.python.org/pypi?%3Aaction=register_form
+.. _pypi: http://pypi.python.org
diff --git a/trace/README.rst b/trace/README.rst
new file mode 100644
index 000000000000..39178ee440fc
--- /dev/null
+++ b/trace/README.rst
@@ -0,0 +1,97 @@
+Python Client for Stackdriver Trace API (`Alpha`_)
+==================================================================================================
+
+Idiomatic Python client for `Stackdriver Trace API`_
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. _Alpha: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst
+.. _Stackdriver Trace API: https://cloud.google.com/trace
+.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/trace-usage
+.. _Product Documentation: https://cloud.google.com/trace
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable the trace api.`_
+3. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable the trace api.: https://cloud.google.com/trace
+.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/google-cloud-auth
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install gapic-google-cloud-trace-v1
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install gapic-google-cloud-trace-v1
+
+Preview
+~~~~~~~
+
+TraceServiceClient
+^^^^^^^^^^^^^^^^^^^^^^
+
+.. code:: py
+
+    from google.cloud.gapic.trace.v1 import trace_service_client
+    from google.gax import CallOptions, INITIAL_PAGE
+    client = trace_service_client.TraceServiceClient()
+    project_id = ''
+
+    # Iterate over all results
+    for element in client.list_traces(project_id):
+        # process element
+        pass
+
+    # Or iterate over results one page at a time
+    for page in client.list_traces(project_id, options=CallOptions(page_token=INITIAL_PAGE)):
+        for element in page:
+            # process element
+            pass
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for the Stackdriver Trace API
+  to see other available methods on the client.
+- Read the `Stackdriver Trace API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `repository’s main README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Stackdriver Trace API Product documentation: https://cloud.google.com/trace
+.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst
\ No newline at end of file
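The preview above stops at ``list_traces``. A hedged companion sketch for the write/read pair, mirroring the placeholder-style (empty string) IDs that the generated docstrings later in this patch use:

.. code:: py

    from google.cloud.gapic.trace.v1 import trace_service_client
    from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2
    from google.gax import errors

    client = trace_service_client.TraceServiceClient()
    project_id = ''  # placeholder, as in the preview above

    try:
        # Upload (or merge) trace data, then fetch a single trace by ID.
        client.patch_traces(project_id, trace_pb2.Traces())
        trace = client.get_trace(project_id, trace_id='')
    except errors.GaxError:
        # Raised when an RPC is aborted, per the method docstrings below.
        raise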
diff --git a/trace/google/__init__.py b/trace/google/__init__.py
new file mode 100644
index 000000000000..de40ea7ca058
--- /dev/null
+++ b/trace/google/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/trace/google/cloud/__init__.py b/trace/google/cloud/__init__.py
new file mode 100644
index 000000000000..de40ea7ca058
--- /dev/null
+++ b/trace/google/cloud/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/trace/google/cloud/gapic/__init__.py b/trace/google/cloud/gapic/__init__.py
new file mode 100644
index 000000000000..de40ea7ca058
--- /dev/null
+++ b/trace/google/cloud/gapic/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/trace/google/cloud/gapic/trace/__init__.py b/trace/google/cloud/gapic/trace/__init__.py
new file mode 100644
index 000000000000..de40ea7ca058
--- /dev/null
+++ b/trace/google/cloud/gapic/trace/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/trace/google/cloud/gapic/trace/v1/__init__.py b/trace/google/cloud/gapic/trace/v1/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/trace/google/cloud/gapic/trace/v1/enums.py b/trace/google/cloud/gapic/trace/v1/enums.py
new file mode 100644
index 000000000000..c6cc48fb471a
--- /dev/null
+++ b/trace/google/cloud/gapic/trace/v1/enums.py
@@ -0,0 +1,53 @@
+# Copyright 2016 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Wrappers for protocol buffer enum types."""
+
+
+class TraceSpan(object):
+    class SpanKind(object):
+        """
+        Type of span. Can be used to specify additional relationships between spans
+        in addition to a parent/child relationship.
+
+        Attributes:
+          SPAN_KIND_UNSPECIFIED (int): Unspecified.
+          RPC_SERVER (int): Indicates that the span covers server-side handling of an RPC or other
+            remote network request.
+          RPC_CLIENT (int): Indicates that the span covers the client-side wrapper around an RPC or
+            other remote request.
+        """
+        SPAN_KIND_UNSPECIFIED = 0
+        RPC_SERVER = 1
+        RPC_CLIENT = 2
+
+
+class ListTracesRequest(object):
+    class ViewType(object):
+        """
+        Type of data returned for traces in the list.
+
+        Attributes:
+          VIEW_TYPE_UNSPECIFIED (int): Default is ``MINIMAL`` if unspecified.
+          MINIMAL (int): Minimal view of the trace record that contains only the project
+            and trace IDs.
+          ROOTSPAN (int): Root span view of the trace record that returns the root spans along
+            with the minimal trace data.
+          COMPLETE (int): Complete view of the trace record that contains the actual trace data.
+            This is equivalent to calling the REST ``get`` or RPC ``GetTrace`` method
+            using the ID of each listed trace.
+        """
+        VIEW_TYPE_UNSPECIFIED = 0
+        MINIMAL = 1
+        ROOTSPAN = 2
+        COMPLETE = 3
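These enum wrappers are plain Python classes whose attributes are the proto enum's integer values, so they can be handed straight to request parameters. A hedged sketch of selecting a non-default view for ``list_traces`` (the empty ``project_id`` is a placeholder, following the docstring examples below):

.. code:: py

    from google.cloud.gapic.trace.v1 import enums, trace_service_client

    client = trace_service_client.TraceServiceClient()

    # Ask for complete trace records rather than the default MINIMAL view.
    view = enums.ListTracesRequest.ViewType.COMPLETE
    for trace in client.list_traces('', view=view):
        pass  # each element is a trace_pb2.Trace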
diff --git a/trace/google/cloud/gapic/trace/v1/trace_service_client.py b/trace/google/cloud/gapic/trace/v1/trace_service_client.py
new file mode 100644
index 000000000000..22ef0eb1aec1
--- /dev/null
+++ b/trace/google/cloud/gapic/trace/v1/trace_service_client.py
@@ -0,0 +1,310 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# EDITING INSTRUCTIONS
+# This file was generated from the file
+# https://github.com/google/googleapis/blob/master/google/devtools/cloudtrace/v1/trace.proto,
+# and updates to that file get reflected here through a refresh process.
+# For the short term, the refresh process will only be runnable by Google engineers.
+#
+# The only allowed edits are to method and file documentation. A 3-way
+# merge preserves those additions if the generated source changes.
+"""Accesses the google.devtools.cloudtrace.v1 TraceService API."""
+
+import collections
+import json
+import os
+import pkg_resources
+import platform
+
+from google.gax import api_callable
+from google.gax import config
+from google.gax import path_template
+import google.gax
+
+from google.cloud.gapic.trace.v1 import enums
+from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2
+from google.protobuf import timestamp_pb2
+
+_PageDesc = google.gax.PageDescriptor
+
+
+class TraceServiceClient(object):
+    """
+    This file describes an API for collecting and viewing traces and spans
+    within a trace. A Trace is a collection of spans corresponding to a single
+    operation or set of operations for an application. A span is an individual
+    timed event which forms a node of the trace tree. Spans for a single trace
+    may span multiple services.
+    """
+
+    SERVICE_ADDRESS = 'cloudtrace.googleapis.com'
+    """The default address of the service."""
+
+    DEFAULT_SERVICE_PORT = 443
+    """The default port of the service."""
+
+    _PAGE_DESCRIPTORS = {
+        'list_traces': _PageDesc('page_token', 'next_page_token', 'traces')
+    }
+
+    # The scopes needed to make gRPC calls to all of the methods defined in
+    # this service.
+    _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform',
+                   'https://www.googleapis.com/auth/trace.append',
+                   'https://www.googleapis.com/auth/trace.readonly', )
+
+    def __init__(self,
+                 service_path=SERVICE_ADDRESS,
+                 port=DEFAULT_SERVICE_PORT,
+                 channel=None,
+                 credentials=None,
+                 ssl_credentials=None,
+                 scopes=None,
+                 client_config=None,
+                 app_name=None,
+                 app_version='',
+                 lib_name=None,
+                 lib_version='',
+                 metrics_headers=()):
+        """Constructor.
+
+        Args:
+          service_path (string): The domain name of the API remote host.
+          port (int): The port on which to connect to the remote host.
+          channel (:class:`grpc.Channel`): A ``Channel`` instance through
+            which to make calls.
+          credentials (object): The authorization credentials to attach to
+            requests. These credentials identify this application to the
+            service.
+ ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A TraceServiceClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-trace', ).version + + # Load the configuration defaults. + default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'trace_service_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.devtools.cloudtrace.v1.TraceService', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.trace_service_stub = config.create_stub( + trace_pb2.TraceServiceStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._patch_traces = api_callable.create_api_call( + self.trace_service_stub.PatchTraces, + settings=defaults['patch_traces']) + self._get_trace = api_callable.create_api_call( + self.trace_service_stub.GetTrace, settings=defaults['get_trace']) + self._list_traces = api_callable.create_api_call( + self.trace_service_stub.ListTraces, + settings=defaults['list_traces']) + + # Service calls + def patch_traces(self, project_id, traces, options=None): + """ + Sends new traces to Stackdriver Trace or updates existing traces. If the ID + of a trace that you send matches that of an existing trace, any fields + in the existing trace and its spans are overwritten by the provided values, + and any new fields provided are merged with the existing trace data. If the + ID does not match, a new trace is created. 
+
+        Example:
+          >>> from google.cloud.gapic.trace.v1 import trace_service_client
+          >>> from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2
+          >>> client = trace_service_client.TraceServiceClient()
+          >>> project_id = ''
+          >>> traces = trace_pb2.Traces()
+          >>> client.patch_traces(project_id, traces)
+
+        Args:
+          project_id (string): ID of the Cloud project where the trace data is stored.
+          traces (:class:`google.cloud.proto.devtools.cloudtrace.v1.trace_pb2.Traces`): The body of the message.
+          options (:class:`google.gax.CallOptions`): Overrides the default
+            settings for this call, e.g., timeout, retries, etc.
+
+        Raises:
+          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+          :exc:`ValueError` if the parameters are invalid.
+        """
+        # Create the request object.
+        request = trace_pb2.PatchTracesRequest(
+            project_id=project_id, traces=traces)
+        self._patch_traces(request, options)
+
+    def get_trace(self, project_id, trace_id, options=None):
+        """
+        Gets a single trace by its ID.
+
+        Example:
+          >>> from google.cloud.gapic.trace.v1 import trace_service_client
+          >>> client = trace_service_client.TraceServiceClient()
+          >>> project_id = ''
+          >>> trace_id = ''
+          >>> response = client.get_trace(project_id, trace_id)
+
+        Args:
+          project_id (string): ID of the Cloud project where the trace data is stored.
+          trace_id (string): ID of the trace to return.
+          options (:class:`google.gax.CallOptions`): Overrides the default
+            settings for this call, e.g., timeout, retries, etc.
+
+        Returns:
+          A :class:`google.cloud.proto.devtools.cloudtrace.v1.trace_pb2.Trace` instance.
+
+        Raises:
+          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+          :exc:`ValueError` if the parameters are invalid.
+        """
+        # Create the request object.
+        request = trace_pb2.GetTraceRequest(
+            project_id=project_id, trace_id=trace_id)
+        return self._get_trace(request, options)
+
+    def list_traces(self,
+                    project_id,
+                    view=None,
+                    page_size=None,
+                    start_time=None,
+                    end_time=None,
+                    filter_=None,
+                    order_by=None,
+                    options=None):
+        """
+        Returns a list of traces that match the specified filter conditions.
+
+        Example:
+          >>> from google.cloud.gapic.trace.v1 import trace_service_client
+          >>> from google.gax import CallOptions, INITIAL_PAGE
+          >>> client = trace_service_client.TraceServiceClient()
+          >>> project_id = ''
+          >>>
+          >>> # Iterate over all results
+          >>> for element in client.list_traces(project_id):
+          >>>     # process element
+          >>>     pass
+          >>>
+          >>> # Or iterate over results one page at a time
+          >>> for page in client.list_traces(project_id, options=CallOptions(page_token=INITIAL_PAGE)):
+          >>>     for element in page:
+          >>>         # process element
+          >>>         pass
+
+        Args:
+          project_id (string): ID of the Cloud project where the trace data is stored.
+          view (enum :class:`google.cloud.gapic.trace.v1.enums.ListTracesRequest.ViewType`): Type of data returned for traces in the list. Optional. Default is
+            ``MINIMAL``.
+          page_size (int): Maximum number of traces to return. If not specified or <= 0, the
+            implementation selects a reasonable value. The implementation may
+            return fewer traces than the requested page size. Optional.
+          start_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): Start of the time interval (inclusive) during which the trace data was
+            collected from the application.
+          end_time (:class:`google.protobuf.timestamp_pb2.Timestamp`): End of the time interval (inclusive) during which the trace data was
+            collected from the application.
+          filter_ (string): An optional filter for the request.
+          order_by (string): Field used to sort the returned traces. Optional.
+            Can be one of the following:
+
+            * ``trace_id``
+            * ``name`` (``name`` field of root span in the trace)
+            * ``duration`` (difference between ``end_time`` and ``start_time`` fields of
+              the root span)
+            * ``start`` (``start_time`` field of the root span)
+
+            Descending order can be specified by appending ``desc`` to the sort field
+            (for example, ``name desc``).
+
+            Only one sort field is permitted.
+          options (:class:`google.gax.CallOptions`): Overrides the default
+            settings for this call, e.g., timeout, retries, etc.
+
+        Returns:
+          A :class:`google.gax.PageIterator` instance. By default, this
+          is an iterable of :class:`google.cloud.proto.devtools.cloudtrace.v1.trace_pb2.Trace` instances.
+          This object can also be configured to iterate over the pages
+          of the response through the `CallOptions` parameter.
+
+        Raises:
+          :exc:`google.gax.errors.GaxError` if the RPC is aborted.
+          :exc:`ValueError` if the parameters are invalid.
+        """
+        # Create the request object.
+        request = trace_pb2.ListTracesRequest(
+            project_id=project_id,
+            view=view,
+            page_size=page_size,
+            start_time=start_time,
+            end_time=end_time,
+            filter=filter_,
+            order_by=order_by)
+        return self._list_traces(request, options)
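The constructor's ``client_config`` argument (documented above) overlays the retry defaults shipped in ``trace_service_client_config.json``, which follows; per the docstring, anything left out falls back to those defaults. A hedged sketch, assuming the documented fallback behavior and changing only the total timeout:

.. code:: py

    from google.cloud.gapic.trace.v1 import trace_service_client

    # Mirrors the JSON structure below; only total_timeout_millis departs
    # from the shipped default of 45000.
    client_config = {
        'interfaces': {
            'google.devtools.cloudtrace.v1.TraceService': {
                'retry_params': {
                    'default': {
                        'initial_retry_delay_millis': 100,
                        'retry_delay_multiplier': 1.2,
                        'max_retry_delay_millis': 1000,
                        'initial_rpc_timeout_millis': 20000,
                        'rpc_timeout_multiplier': 1.5,
                        'max_rpc_timeout_millis': 30000,
                        'total_timeout_millis': 90000,
                    },
                },
            },
        },
    }
    client = trace_service_client.TraceServiceClient(client_config=client_config)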
diff --git a/trace/google/cloud/gapic/trace/v1/trace_service_client_config.json b/trace/google/cloud/gapic/trace/v1/trace_service_client_config.json
new file mode 100644
index 000000000000..5e826c186b13
--- /dev/null
+++ b/trace/google/cloud/gapic/trace/v1/trace_service_client_config.json
@@ -0,0 +1,43 @@
+{
+  "interfaces": {
+    "google.devtools.cloudtrace.v1.TraceService": {
+      "retry_codes": {
+        "idempotent": [
+          "DEADLINE_EXCEEDED",
+          "UNAVAILABLE"
+        ],
+        "non_idempotent": [
+          "UNAVAILABLE"
+        ]
+      },
+      "retry_params": {
+        "default": {
+          "initial_retry_delay_millis": 100,
+          "retry_delay_multiplier": 1.2,
+          "max_retry_delay_millis": 1000,
+          "initial_rpc_timeout_millis": 20000,
+          "rpc_timeout_multiplier": 1.5,
+          "max_rpc_timeout_millis": 30000,
+          "total_timeout_millis": 45000
+        }
+      },
+      "methods": {
+        "PatchTraces": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "GetTrace": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        },
+        "ListTraces": {
+          "timeout_millis": 30000,
+          "retry_codes_name": "idempotent",
+          "retry_params_name": "default"
+        }
+      }
+    }
+  }
+}
diff --git a/trace/google/cloud/proto/__init__.py b/trace/google/cloud/proto/__init__.py
new file mode 100644
index 000000000000..de40ea7ca058
--- /dev/null
+++ b/trace/google/cloud/proto/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/trace/google/cloud/proto/devtools/__init__.py b/trace/google/cloud/proto/devtools/__init__.py
new file mode 100644
index 000000000000..de40ea7ca058
--- /dev/null
+++ b/trace/google/cloud/proto/devtools/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/trace/google/cloud/proto/devtools/cloudtrace/__init__.py b/trace/google/cloud/proto/devtools/cloudtrace/__init__.py
new file mode 100644
index 000000000000..de40ea7ca058
--- /dev/null
+++ b/trace/google/cloud/proto/devtools/cloudtrace/__init__.py
@@ -0,0 +1 @@
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/trace/google/cloud/proto/devtools/cloudtrace/v1/__init__.py b/trace/google/cloud/proto/devtools/cloudtrace/v1/__init__.py
new file mode 100644
index 000000000000..8b137891791f
--- /dev/null +++ b/trace/google/cloud/proto/devtools/cloudtrace/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/trace/google/cloud/proto/devtools/cloudtrace/v1/trace_pb2.py b/trace/google/cloud/proto/devtools/cloudtrace/v1/trace_pb2.py new file mode 100644 index 000000000000..389893c0d9fd --- /dev/null +++ b/trace/google/cloud/proto/devtools/cloudtrace/v1/trace_pb2.py @@ -0,0 +1,765 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/proto/devtools/cloudtrace/v1/trace.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/devtools/cloudtrace/v1/trace.proto', + package='google.devtools.cloudtrace.v1', + syntax='proto3', + serialized_pb=_b('\n5google/cloud/proto/devtools/cloudtrace/v1/trace.proto\x12\x1dgoogle.devtools.cloudtrace.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"f\n\x05Trace\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x10\n\x08trace_id\x18\x02 \x01(\t\x12\x37\n\x05spans\x18\x03 \x03(\x0b\x32(.google.devtools.cloudtrace.v1.TraceSpan\">\n\x06Traces\x12\x34\n\x06traces\x18\x01 \x03(\x0b\x32$.google.devtools.cloudtrace.v1.Trace\"\x9d\x03\n\tTraceSpan\x12\x0f\n\x07span_id\x18\x01 \x01(\x06\x12?\n\x04kind\x18\x02 \x01(\x0e\x32\x31.google.devtools.cloudtrace.v1.TraceSpan.SpanKind\x12\x0c\n\x04name\x18\x03 \x01(\t\x12.\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x16\n\x0eparent_span_id\x18\x06 \x01(\x06\x12\x44\n\x06labels\x18\x07 \x03(\x0b\x32\x34.google.devtools.cloudtrace.v1.TraceSpan.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"E\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x0e\n\nRPC_SERVER\x10\x01\x12\x0e\n\nRPC_CLIENT\x10\x02\"\xe7\x02\n\x11ListTracesRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12G\n\x04view\x18\x02 \x01(\x0e\x32\x39.google.devtools.cloudtrace.v1.ListTracesRequest.ViewType\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12.\n\nstart_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06\x66ilter\x18\x07 \x01(\t\x12\x10\n\x08order_by\x18\x08 \x01(\t\"N\n\x08ViewType\x12\x19\n\x15VIEW_TYPE_UNSPECIFIED\x10\x00\x12\x0b\n\x07MINIMAL\x10\x01\x12\x0c\n\x08ROOTSPAN\x10\x02\x12\x0c\n\x08\x43OMPLETE\x10\x03\"c\n\x12ListTracesResponse\x12\x34\n\x06traces\x18\x01 \x03(\x0b\x32$.google.devtools.cloudtrace.v1.Trace\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"7\n\x0fGetTraceRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x10\n\x08trace_id\x18\x02 \x01(\t\"_\n\x12PatchTracesRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x35\n\x06traces\x18\x02 
\x01(\x0b\x32%.google.devtools.cloudtrace.v1.Traces2\xd1\x03\n\x0cTraceService\x12\x9b\x01\n\nListTraces\x12\x30.google.devtools.cloudtrace.v1.ListTracesRequest\x1a\x31.google.devtools.cloudtrace.v1.ListTracesResponse\"(\x82\xd3\xe4\x93\x02\"\x12 /v1/projects/{project_id}/traces\x12\x95\x01\n\x08GetTrace\x12..google.devtools.cloudtrace.v1.GetTraceRequest\x1a$.google.devtools.cloudtrace.v1.Trace\"3\x82\xd3\xe4\x93\x02-\x12+/v1/projects/{project_id}/traces/{trace_id}\x12\x8a\x01\n\x0bPatchTraces\x12\x31.google.devtools.cloudtrace.v1.PatchTracesRequest\x1a\x16.google.protobuf.Empty\"0\x82\xd3\xe4\x93\x02*2 /v1/projects/{project_id}/traces:\x06tracesB\x92\x01\n!com.google.devtools.cloudtrace.v1B\nTraceProtoP\x01ZGgoogle.golang.org/genproto/googleapis/devtools/cloudtrace/v1;cloudtrace\xaa\x02\x15Google.Cloud.Trace.V1b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + +_TRACESPAN_SPANKIND = _descriptor.EnumDescriptor( + name='SpanKind', + full_name='google.devtools.cloudtrace.v1.TraceSpan.SpanKind', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='SPAN_KIND_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RPC_SERVER', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RPC_CLIENT', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=693, + serialized_end=762, +) +_sym_db.RegisterEnumDescriptor(_TRACESPAN_SPANKIND) + +_LISTTRACESREQUEST_VIEWTYPE = _descriptor.EnumDescriptor( + name='ViewType', + full_name='google.devtools.cloudtrace.v1.ListTracesRequest.ViewType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VIEW_TYPE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MINIMAL', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ROOTSPAN', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='COMPLETE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1046, + serialized_end=1124, +) +_sym_db.RegisterEnumDescriptor(_LISTTRACESREQUEST_VIEWTYPE) + + +_TRACE = _descriptor.Descriptor( + name='Trace', + full_name='google.devtools.cloudtrace.v1.Trace', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.devtools.cloudtrace.v1.Trace.project_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='trace_id', full_name='google.devtools.cloudtrace.v1.Trace.trace_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='spans', full_name='google.devtools.cloudtrace.v1.Trace.spans', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=180, + serialized_end=282, +) + + +_TRACES = _descriptor.Descriptor( + name='Traces', + full_name='google.devtools.cloudtrace.v1.Traces', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='traces', full_name='google.devtools.cloudtrace.v1.Traces.traces', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=284, + serialized_end=346, +) + + +_TRACESPAN_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.devtools.cloudtrace.v1.TraceSpan.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.devtools.cloudtrace.v1.TraceSpan.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.devtools.cloudtrace.v1.TraceSpan.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=646, + serialized_end=691, +) + +_TRACESPAN = _descriptor.Descriptor( + name='TraceSpan', + full_name='google.devtools.cloudtrace.v1.TraceSpan', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='span_id', full_name='google.devtools.cloudtrace.v1.TraceSpan.span_id', index=0, + number=1, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='kind', full_name='google.devtools.cloudtrace.v1.TraceSpan.kind', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='google.devtools.cloudtrace.v1.TraceSpan.name', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_time', full_name='google.devtools.cloudtrace.v1.TraceSpan.start_time', index=3, + number=4, type=11, 
cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_time', full_name='google.devtools.cloudtrace.v1.TraceSpan.end_time', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='parent_span_id', full_name='google.devtools.cloudtrace.v1.TraceSpan.parent_span_id', index=5, + number=6, type=6, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.devtools.cloudtrace.v1.TraceSpan.labels', index=6, + number=7, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TRACESPAN_LABELSENTRY, ], + enum_types=[ + _TRACESPAN_SPANKIND, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=349, + serialized_end=762, +) + + +_LISTTRACESREQUEST = _descriptor.Descriptor( + name='ListTracesRequest', + full_name='google.devtools.cloudtrace.v1.ListTracesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.devtools.cloudtrace.v1.ListTracesRequest.project_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='view', full_name='google.devtools.cloudtrace.v1.ListTracesRequest.view', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.devtools.cloudtrace.v1.ListTracesRequest.page_size', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.devtools.cloudtrace.v1.ListTracesRequest.page_token', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_time', full_name='google.devtools.cloudtrace.v1.ListTracesRequest.start_time', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_time', full_name='google.devtools.cloudtrace.v1.ListTracesRequest.end_time', index=5, + number=6, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='google.devtools.cloudtrace.v1.ListTracesRequest.filter', index=6, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='order_by', full_name='google.devtools.cloudtrace.v1.ListTracesRequest.order_by', index=7, + number=8, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _LISTTRACESREQUEST_VIEWTYPE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=765, + serialized_end=1124, +) + + +_LISTTRACESRESPONSE = _descriptor.Descriptor( + name='ListTracesResponse', + full_name='google.devtools.cloudtrace.v1.ListTracesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='traces', full_name='google.devtools.cloudtrace.v1.ListTracesResponse.traces', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.devtools.cloudtrace.v1.ListTracesResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1126, + serialized_end=1225, +) + + +_GETTRACEREQUEST = _descriptor.Descriptor( + name='GetTraceRequest', + full_name='google.devtools.cloudtrace.v1.GetTraceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.devtools.cloudtrace.v1.GetTraceRequest.project_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='trace_id', full_name='google.devtools.cloudtrace.v1.GetTraceRequest.trace_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1227, + serialized_end=1282, +) + + +_PATCHTRACESREQUEST = _descriptor.Descriptor( + name='PatchTracesRequest', + full_name='google.devtools.cloudtrace.v1.PatchTracesRequest', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project_id', full_name='google.devtools.cloudtrace.v1.PatchTracesRequest.project_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='traces', full_name='google.devtools.cloudtrace.v1.PatchTracesRequest.traces', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1284, + serialized_end=1379, +) + +_TRACE.fields_by_name['spans'].message_type = _TRACESPAN +_TRACES.fields_by_name['traces'].message_type = _TRACE +_TRACESPAN_LABELSENTRY.containing_type = _TRACESPAN +_TRACESPAN.fields_by_name['kind'].enum_type = _TRACESPAN_SPANKIND +_TRACESPAN.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TRACESPAN.fields_by_name['end_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TRACESPAN.fields_by_name['labels'].message_type = _TRACESPAN_LABELSENTRY +_TRACESPAN_SPANKIND.containing_type = _TRACESPAN +_LISTTRACESREQUEST.fields_by_name['view'].enum_type = _LISTTRACESREQUEST_VIEWTYPE +_LISTTRACESREQUEST.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LISTTRACESREQUEST.fields_by_name['end_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LISTTRACESREQUEST_VIEWTYPE.containing_type = _LISTTRACESREQUEST +_LISTTRACESRESPONSE.fields_by_name['traces'].message_type = _TRACE +_PATCHTRACESREQUEST.fields_by_name['traces'].message_type = _TRACES +DESCRIPTOR.message_types_by_name['Trace'] = _TRACE +DESCRIPTOR.message_types_by_name['Traces'] = _TRACES +DESCRIPTOR.message_types_by_name['TraceSpan'] = _TRACESPAN +DESCRIPTOR.message_types_by_name['ListTracesRequest'] = _LISTTRACESREQUEST +DESCRIPTOR.message_types_by_name['ListTracesResponse'] = _LISTTRACESRESPONSE +DESCRIPTOR.message_types_by_name['GetTraceRequest'] = _GETTRACEREQUEST +DESCRIPTOR.message_types_by_name['PatchTracesRequest'] = _PATCHTRACESREQUEST + +Trace = _reflection.GeneratedProtocolMessageType('Trace', (_message.Message,), dict( + DESCRIPTOR = _TRACE, + __module__ = 'google.cloud.proto.devtools.cloudtrace.v1.trace_pb2' + # @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.Trace) + )) +_sym_db.RegisterMessage(Trace) + +Traces = _reflection.GeneratedProtocolMessageType('Traces', (_message.Message,), dict( + DESCRIPTOR = _TRACES, + __module__ = 'google.cloud.proto.devtools.cloudtrace.v1.trace_pb2' + # @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.Traces) + )) +_sym_db.RegisterMessage(Traces) + +TraceSpan = _reflection.GeneratedProtocolMessageType('TraceSpan', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _TRACESPAN_LABELSENTRY, + __module__ = 'google.cloud.proto.devtools.cloudtrace.v1.trace_pb2' + # @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.TraceSpan.LabelsEntry) + )) + , + DESCRIPTOR = _TRACESPAN, + __module__ = 
'google.cloud.proto.devtools.cloudtrace.v1.trace_pb2' + # @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.TraceSpan) + )) +_sym_db.RegisterMessage(TraceSpan) +_sym_db.RegisterMessage(TraceSpan.LabelsEntry) + +ListTracesRequest = _reflection.GeneratedProtocolMessageType('ListTracesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTRACESREQUEST, + __module__ = 'google.cloud.proto.devtools.cloudtrace.v1.trace_pb2' + # @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.ListTracesRequest) + )) +_sym_db.RegisterMessage(ListTracesRequest) + +ListTracesResponse = _reflection.GeneratedProtocolMessageType('ListTracesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTRACESRESPONSE, + __module__ = 'google.cloud.proto.devtools.cloudtrace.v1.trace_pb2' + # @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.ListTracesResponse) + )) +_sym_db.RegisterMessage(ListTracesResponse) + +GetTraceRequest = _reflection.GeneratedProtocolMessageType('GetTraceRequest', (_message.Message,), dict( + DESCRIPTOR = _GETTRACEREQUEST, + __module__ = 'google.cloud.proto.devtools.cloudtrace.v1.trace_pb2' + # @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.GetTraceRequest) + )) +_sym_db.RegisterMessage(GetTraceRequest) + +PatchTracesRequest = _reflection.GeneratedProtocolMessageType('PatchTracesRequest', (_message.Message,), dict( + DESCRIPTOR = _PATCHTRACESREQUEST, + __module__ = 'google.cloud.proto.devtools.cloudtrace.v1.trace_pb2' + # @@protoc_insertion_point(class_scope:google.devtools.cloudtrace.v1.PatchTracesRequest) + )) +_sym_db.RegisterMessage(PatchTracesRequest) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n!com.google.devtools.cloudtrace.v1B\nTraceProtoP\001ZGgoogle.golang.org/genproto/googleapis/devtools/cloudtrace/v1;cloudtrace\252\002\025Google.Cloud.Trace.V1')) +_TRACESPAN_LABELSENTRY.has_options = True +_TRACESPAN_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class TraceServiceStub(object): + """This file describes an API for collecting and viewing traces and spans + within a trace. A Trace is a collection of spans corresponding to a single + operation or set of operations for an application. A span is an individual + timed event which forms a node of the trace tree. Spans for a single trace + may span multiple services. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.ListTraces = channel.unary_unary( + '/google.devtools.cloudtrace.v1.TraceService/ListTraces', + request_serializer=ListTracesRequest.SerializeToString, + response_deserializer=ListTracesResponse.FromString, + ) + self.GetTrace = channel.unary_unary( + '/google.devtools.cloudtrace.v1.TraceService/GetTrace', + request_serializer=GetTraceRequest.SerializeToString, + response_deserializer=Trace.FromString, + ) + self.PatchTraces = channel.unary_unary( + '/google.devtools.cloudtrace.v1.TraceService/PatchTraces', + request_serializer=PatchTracesRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class TraceServiceServicer(object): + """This file describes an API for collecting and viewing traces and spans + within a trace. A Trace is a collection of spans corresponding to a single + operation or set of operations for an application. A span is an individual + timed event which forms a node of the trace tree. Spans for a single trace + may span multiple services. + """ + + def ListTraces(self, request, context): + """Returns of a list of traces that match the specified filter conditions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTrace(self, request, context): + """Gets a single trace by its ID. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def PatchTraces(self, request, context): + """Sends new traces to Stackdriver Trace or updates existing traces. If the ID + of a trace that you send matches that of an existing trace, any fields + in the existing trace and its spans are overwritten by the provided values, + and any new fields provided are merged with the existing trace data. If the + ID does not match, a new trace is created. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_TraceServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListTraces': grpc.unary_unary_rpc_method_handler( + servicer.ListTraces, + request_deserializer=ListTracesRequest.FromString, + response_serializer=ListTracesResponse.SerializeToString, + ), + 'GetTrace': grpc.unary_unary_rpc_method_handler( + servicer.GetTrace, + request_deserializer=GetTraceRequest.FromString, + response_serializer=Trace.SerializeToString, + ), + 'PatchTraces': grpc.unary_unary_rpc_method_handler( + servicer.PatchTraces, + request_deserializer=PatchTracesRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.devtools.cloudtrace.v1.TraceService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaTraceServiceServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """This file describes an API for collecting and viewing traces and spans + within a trace. A Trace is a collection of spans corresponding to a single + operation or set of operations for an application. 
A span is an individual + timed event which forms a node of the trace tree. Spans for a single trace + may span multiple services. + """ + def ListTraces(self, request, context): + """Returns of a list of traces that match the specified filter conditions. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetTrace(self, request, context): + """Gets a single trace by its ID. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def PatchTraces(self, request, context): + """Sends new traces to Stackdriver Trace or updates existing traces. If the ID + of a trace that you send matches that of an existing trace, any fields + in the existing trace and its spans are overwritten by the provided values, + and any new fields provided are merged with the existing trace data. If the + ID does not match, a new trace is created. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaTraceServiceStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """This file describes an API for collecting and viewing traces and spans + within a trace. A Trace is a collection of spans corresponding to a single + operation or set of operations for an application. A span is an individual + timed event which forms a node of the trace tree. Spans for a single trace + may span multiple services. + """ + def ListTraces(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Returns of a list of traces that match the specified filter conditions. + """ + raise NotImplementedError() + ListTraces.future = None + def GetTrace(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets a single trace by its ID. + """ + raise NotImplementedError() + GetTrace.future = None + def PatchTraces(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Sends new traces to Stackdriver Trace or updates existing traces. If the ID + of a trace that you send matches that of an existing trace, any fields + in the existing trace and its spans are overwritten by the provided values, + and any new fields provided are merged with the existing trace data. If the + ID does not match, a new trace is created. + """ + raise NotImplementedError() + PatchTraces.future = None + + + def beta_create_TraceService_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.devtools.cloudtrace.v1.TraceService', 'GetTrace'): GetTraceRequest.FromString, + ('google.devtools.cloudtrace.v1.TraceService', 'ListTraces'): ListTracesRequest.FromString, + ('google.devtools.cloudtrace.v1.TraceService', 'PatchTraces'): PatchTracesRequest.FromString, + } + response_serializers = { + ('google.devtools.cloudtrace.v1.TraceService', 'GetTrace'): Trace.SerializeToString, + ('google.devtools.cloudtrace.v1.TraceService', 'ListTraces'): ListTracesResponse.SerializeToString, + ('google.devtools.cloudtrace.v1.TraceService', 'PatchTraces'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + } + method_implementations = { + ('google.devtools.cloudtrace.v1.TraceService', 'GetTrace'): face_utilities.unary_unary_inline(servicer.GetTrace), + ('google.devtools.cloudtrace.v1.TraceService', 'ListTraces'): face_utilities.unary_unary_inline(servicer.ListTraces), + ('google.devtools.cloudtrace.v1.TraceService', 'PatchTraces'): face_utilities.unary_unary_inline(servicer.PatchTraces), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_TraceService_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.devtools.cloudtrace.v1.TraceService', 'GetTrace'): GetTraceRequest.SerializeToString, + ('google.devtools.cloudtrace.v1.TraceService', 'ListTraces'): ListTracesRequest.SerializeToString, + ('google.devtools.cloudtrace.v1.TraceService', 'PatchTraces'): PatchTracesRequest.SerializeToString, + } + response_deserializers = { + ('google.devtools.cloudtrace.v1.TraceService', 'GetTrace'): Trace.FromString, + ('google.devtools.cloudtrace.v1.TraceService', 'ListTraces'): ListTracesResponse.FromString, + ('google.devtools.cloudtrace.v1.TraceService', 'PatchTraces'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + } + cardinalities = { + 'GetTrace': cardinality.Cardinality.UNARY_UNARY, + 'ListTraces': cardinality.Cardinality.UNARY_UNARY, + 'PatchTraces': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.devtools.cloudtrace.v1.TraceService', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/trace/google/cloud/proto/devtools/cloudtrace/v1/trace_pb2_grpc.py b/trace/google/cloud/proto/devtools/cloudtrace/v1/trace_pb2_grpc.py new file mode 100644 index 000000000000..de3ca9f57728 --- /dev/null +++ b/trace/google/cloud/proto/devtools/cloudtrace/v1/trace_pb2_grpc.py @@ -0,0 +1,93 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
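Editorial aside, not part of the patch: the generated module below exposes a TraceServiceStub whose attributes are unary-unary callables driven through a grpc.Channel. A minimal sketch of direct stub usage, assuming a placeholder endpoint ('localhost:8080') and a plaintext channel; calls against the real service would need a secure, authenticated channel instead.

import grpc

from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2
from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2_grpc

# Placeholder endpoint; production traffic requires a secure channel with
# credentials rather than an insecure one.
channel = grpc.insecure_channel('localhost:8080')
stub = trace_pb2_grpc.TraceServiceStub(channel)

# Each stub attribute accepts the corresponding request proto and returns
# the response proto (ListTracesResponse here).
request = trace_pb2.ListTracesRequest(project_id='my-project')
response = stub.ListTraces(request)
for trace in response.traces:
    print(trace.trace_id)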
+import grpc + +import google.cloud.proto.devtools.cloudtrace.v1.trace_pb2 as google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class TraceServiceStub(object): + """This file describes an API for collecting and viewing traces and spans + within a trace. A Trace is a collection of spans corresponding to a single + operation or set of operations for an application. A span is an individual + timed event which forms a node of the trace tree. Spans for a single trace + may span multiple services. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListTraces = channel.unary_unary( + '/google.devtools.cloudtrace.v1.TraceService/ListTraces', + request_serializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.ListTracesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.ListTracesResponse.FromString, + ) + self.GetTrace = channel.unary_unary( + '/google.devtools.cloudtrace.v1.TraceService/GetTrace', + request_serializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.GetTraceRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.Trace.FromString, + ) + self.PatchTraces = channel.unary_unary( + '/google.devtools.cloudtrace.v1.TraceService/PatchTraces', + request_serializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.PatchTracesRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class TraceServiceServicer(object): + """This file describes an API for collecting and viewing traces and spans + within a trace. A Trace is a collection of spans corresponding to a single + operation or set of operations for an application. A span is an individual + timed event which forms a node of the trace tree. Spans for a single trace + may span multiple services. + """ + + def ListTraces(self, request, context): + """Returns of a list of traces that match the specified filter conditions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTrace(self, request, context): + """Gets a single trace by its ID. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def PatchTraces(self, request, context): + """Sends new traces to Stackdriver Trace or updates existing traces. If the ID + of a trace that you send matches that of an existing trace, any fields + in the existing trace and its spans are overwritten by the provided values, + and any new fields provided are merged with the existing trace data. If the + ID does not match, a new trace is created. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_TraceServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListTraces': grpc.unary_unary_rpc_method_handler( + servicer.ListTraces, + request_deserializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.ListTracesRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.ListTracesResponse.SerializeToString, + ), + 'GetTrace': grpc.unary_unary_rpc_method_handler( + servicer.GetTrace, + request_deserializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.GetTraceRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.Trace.SerializeToString, + ), + 'PatchTraces': grpc.unary_unary_rpc_method_handler( + servicer.PatchTraces, + request_deserializer=google_dot_cloud_dot_proto_dot_devtools_dot_cloudtrace_dot_v1_dot_trace__pb2.PatchTracesRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.devtools.cloudtrace.v1.TraceService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/trace/google/cloud/trace.py b/trace/google/cloud/trace.py new file mode 100644 index 000000000000..d2382ffab502 --- /dev/null +++ b/trace/google/cloud/trace.py @@ -0,0 +1,24 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.trace_v1 import enums +from google.cloud.trace_v1 import TraceServiceClient +from google.cloud.trace_v1 import types + + +__all__ = ( + 'enums', + 'TraceServiceClient', + 'types', +) diff --git a/trace/google/cloud/trace/__init__.py b/trace/google/cloud/trace/__init__.py new file mode 100644 index 000000000000..558d1302e596 --- /dev/null +++ b/trace/google/cloud/trace/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.trace.client import Client + + +__all__ = ['Client'] diff --git a/trace/google/cloud/trace/_gax.py b/trace/google/cloud/trace/_gax.py new file mode 100644 index 000000000000..b412b54ee856 --- /dev/null +++ b/trace/google/cloud/trace/_gax.py @@ -0,0 +1,213 @@ +# Copyright 2017 Google Inc. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""GAX wrapper for interacting with the Stackdriver Trace API."""
+
+from google.cloud.gapic.trace.v1 import trace_service_client
+from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2
+from google.gax import CallOptions
+from google.gax import INITIAL_PAGE
+from google.cloud._helpers import make_secure_channel
+from google.cloud._http import DEFAULT_USER_AGENT
+from google.cloud.iterator import GAXIterator
+from google.protobuf.json_format import MessageToDict
+from google.protobuf.json_format import ParseDict
+
+
+class _TraceAPI(object):
+    """Wrapper to help map trace-related APIs.
+
+    See
+    https://cloud.google.com/trace/docs/reference/v1/rpc/google.devtools.
+    cloudtrace.v1
+
+    :type gax_api:
+        :class:`~google.cloud.gapic.trace.v1.trace_service_client.
+        TraceServiceClient`
+    :param gax_api: API object used to make GAX requests.
+
+    :type client: :class:`~google.cloud.trace.client.Client`
+    :param client: The client that owns this API object.
+    """
+    def __init__(self, gax_api, client):
+        self._gax_api = gax_api
+        self.client = client
+
+    def patch_traces(self, project_id, traces, options=None):
+        """Sends new traces to Stackdriver Trace or updates existing traces.
+
+        :type project_id: str
+        :param project_id: ID of the Cloud project where the trace data is
+                           stored.
+
+        :type traces: dict
+        :param traces: The traces to be patched in the API call.
+
+        :type options: :class:`~google.gax.CallOptions`
+        :param options: (Optional) Overrides the default settings for this
+                        call, e.g., timeout, retries etc.
+        """
+        traces_pb = _traces_mapping_to_pb(traces)
+        self._gax_api.patch_traces(project_id, traces_pb, options)
+
+    def get_trace(self, project_id, trace_id, options=None):
+        """Gets a single trace by its ID.
+
+        :type project_id: str
+        :param project_id: ID of the Cloud project where the trace data is
+                           stored.
+
+        :type trace_id: str
+        :param trace_id: ID of the trace to return.
+
+        :type options: :class:`~google.gax.CallOptions`
+        :param options: (Optional) Overrides the default settings for this
+                        call, e.g., timeout, retries etc.
+
+        :rtype: dict
+        :returns: A Trace dict.
+        """
+        trace_pb = self._gax_api.get_trace(project_id, trace_id, options)
+        trace_mapping = _parse_trace_pb(trace_pb)
+        return trace_mapping
+
+    def list_traces(
+            self,
+            project_id,
+            view=None,
+            page_size=None,
+            start_time=None,
+            end_time=None,
+            filter_=None,
+            order_by=None,
+            page_token=None):
+        """Returns a list of traces that match the specified filter
+        conditions.
+
+        :type project_id: str
+        :param project_id: ID of the Cloud project where the trace data is
+                           stored.
+
+        :type view: :class:`google.cloud.gapic.trace.v1.enums.
+                    ListTracesRequest.ViewType`
+        :param view: (Optional) Type of data returned for traces in the list.
+                     Default is ``MINIMAL``.
+
+        :type page_size: int
+        :param page_size: (Optional) Maximum number of traces to return.
+                          If not specified or <= 0, the implementation selects
+                          a reasonable value. The implementation may return
+                          fewer traces than the requested page size.
+
+        :type start_time: :class:`google.protobuf.timestamp_pb2.Timestamp`
+        :param start_time: (Optional) Start of the time interval (inclusive)
+                           during which the trace data was collected from the
+                           application.
+
+        :type end_time: :class:`google.protobuf.timestamp_pb2.Timestamp`
+        :param end_time: (Optional) End of the time interval (inclusive)
+                         during which the trace data was collected from the
+                         application.
+
+        :type filter_: str
+        :param filter_: (Optional) An optional filter for the request.
+
+        :type order_by: str
+        :param order_by: (Optional) Field used to sort the returned traces.
+
+        :type page_token: str
+        :param page_token: (Optional) Opaque marker for the next "page" of
+                           entries. If not passed, the API will return the
+                           first page of entries.
+
+        :rtype: :class:`~google.cloud.iterator.Iterator`
+        :returns: Traces that match the specified filter conditions.
+        """
+        if page_token is None:
+            page_token = INITIAL_PAGE
+        options = CallOptions(page_token=page_token)
+        page_iter = self._gax_api.list_traces(
+            project_id=project_id,
+            view=view,
+            page_size=page_size,
+            start_time=start_time,
+            end_time=end_time,
+            filter_=filter_,
+            order_by=order_by,
+            options=options)
+        item_to_value = _item_to_mapping
+        return GAXIterator(self.client, page_iter, item_to_value)
+
+
+def _parse_trace_pb(trace_pb):
+    """Parse a ``Trace`` protobuf to a dictionary.
+
+    :type trace_pb: :class:`google.cloud.proto.devtools.cloudtrace.v1.
+                    trace_pb2.Trace`
+    :param trace_pb: A trace protobuf instance.
+
+    :rtype: dict
+    :returns: The converted trace dict.
+    :raises TypeError: If the message type is not registered with
+                       ``MessageToDict``.
+    """
+    # ``MessageToDict`` raises ``TypeError`` for unregistered message types;
+    # let that propagate to the caller.
+    return MessageToDict(trace_pb)
+
+
+def _item_to_mapping(iterator, trace_pb):
+    """Helper callable function for the GAXIterator.
+
+    :type iterator: :class:`~google.cloud.iterator.Iterator`
+    :param iterator: The iterator that is currently in use.
+
+    :type trace_pb: :class:`google.cloud.proto.devtools.cloudtrace.v1.
+                    trace_pb2.Trace`
+    :param trace_pb: A trace protobuf instance.
+
+    :rtype: dict
+    :returns: The trace protobuf converted to a dict.
+    """
+    mapping = _parse_trace_pb(trace_pb)
+    return mapping
+
+
+def make_gax_trace_api(client):
+    """Create an instance of the GAX Trace API.
+
+    :type client: :class:`~google.cloud.trace.client.Client`
+    :param client: The client that holds configuration details.
+
+    :rtype: :class:`~google.cloud.trace._gax._TraceAPI`
+    :returns: A Trace API instance with the proper configurations.
+    """
+    channel = make_secure_channel(
+        client._credentials,
+        DEFAULT_USER_AGENT,
+        trace_service_client.TraceServiceClient.SERVICE_ADDRESS)
+    generated = trace_service_client.TraceServiceClient(
+        channel=channel,
+        lib_name='gccl')
+    return _TraceAPI(generated, client)
+
+
+def _traces_mapping_to_pb(traces_mapping):
+    """Convert a trace dict to protobuf.
+
+    :type traces_mapping: dict
+    :param traces_mapping: A trace mapping.
+
+    :rtype: :class:`google.cloud.proto.devtools.cloudtrace.v1.trace_pb2.
+            Traces`
+    :returns: The converted protobuf type traces.
+    """
+    traces_pb = trace_pb2.Traces()
+    ParseDict(traces_mapping, traces_pb)
+    return traces_pb
diff --git a/trace/google/cloud/trace/client.py b/trace/google/cloud/trace/client.py
new file mode 100644
index 000000000000..d2104924a2bf
--- /dev/null
+++ b/trace/google/cloud/trace/client.py
@@ -0,0 +1,167 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Client for interacting with the Stackdriver Trace API."""
+
+from google.cloud.trace._gax import make_gax_trace_api
+from google.cloud.client import ClientWithProject
+from google.cloud._helpers import _datetime_to_pb_timestamp
+
+
+class Client(ClientWithProject):
+    """Client to bundle configuration needed for API requests.
+
+    :type project: str
+    :param project: The project which the client acts on behalf of.
+                    If not passed, falls back to the default inferred from
+                    the environment.
+
+    :type credentials: :class:`~google.auth.credentials.Credentials`
+    :param credentials: (Optional) The OAuth2 Credentials to use for this
+                        client. If not passed, falls back to the default
+                        inferred from the environment.
+    """
+    _trace_api = None
+
+    def __init__(self, project=None, credentials=None):
+        super(Client, self).__init__(
+            project=project, credentials=credentials)
+
+    @property
+    def trace_api(self):
+        """Helper for trace-related API calls.
+
+        See
+        https://cloud.google.com/trace/docs/reference/v1/rpc/google.devtools.
+        cloudtrace.v1
+        """
+        # Create the API object lazily and cache it, matching the
+        # ``_trace_api = None`` sentinel above.
+        if self._trace_api is None:
+            self._trace_api = make_gax_trace_api(self)
+        return self._trace_api
+
+    def patch_traces(self, traces, project_id=None, options=None):
+        """Sends new traces to Stackdriver Trace or updates existing traces.
+
+        :type traces: dict
+        :param traces: The traces to be patched in the API call.
+
+        :type project_id: str
+        :param project_id: (Optional) ID of the Cloud project where the trace
+                           data is stored.
+
+        :type options: :class:`~google.gax.CallOptions`
+        :param options: (Optional) Overrides the default settings for this
+                        call, e.g., timeout, retries etc.
+        """
+        if project_id is None:
+            project_id = self.project
+
+        self.trace_api.patch_traces(
+            project_id=project_id,
+            traces=traces,
+            options=options)
+
+    def get_trace(self, trace_id, project_id=None, options=None):
+        """Gets a single trace by its ID.
+
+        :type project_id: str
+        :param project_id: (Optional) ID of the Cloud project where the trace
+                           data is stored.
+
+        :type trace_id: str
+        :param trace_id: ID of the trace to return.
+
+        :type options: :class:`~google.gax.CallOptions`
+        :param options: (Optional) Overrides the default settings for this
+                        call, e.g., timeout, retries etc.
+
+        :rtype: dict
+        :returns: A Trace dict.
+        """
+        if project_id is None:
+            project_id = self.project
+
+        return self.trace_api.get_trace(
+            project_id=project_id,
+            trace_id=trace_id,
+            options=options)
+
+    def list_traces(
+            self,
+            project_id=None,
+            view=None,
+            page_size=None,
+            start_time=None,
+            end_time=None,
+            filter_=None,
+            order_by=None,
+            page_token=None):
+        """Returns a list of traces that match the filter conditions.
+
+        :type project_id: str
+        :param project_id: (Optional) ID of the Cloud project where the trace
+                           data is stored.
+
+        :type view: :class:`google.cloud.gapic.trace.v1.enums.
+                    ListTracesRequest.ViewType`
+        :param view: (Optional) Type of data returned for traces in the list.
+                     Default is ``MINIMAL``.
+
+        :type page_size: int
+        :param page_size: (Optional) Maximum number of traces to return.
+                          If not specified or <= 0, the implementation selects
+                          a reasonable value. The implementation may return
+                          fewer traces than the requested page size.
+
+        :type start_time: :class:`~datetime.datetime`
+        :param start_time: (Optional) Start of the time interval (inclusive)
+                           during which the trace data was collected from the
+                           application.
+
+        :type end_time: :class:`~datetime.datetime`
+        :param end_time: (Optional) End of the time interval (inclusive)
+                         during which the trace data was collected from the
+                         application.
+
+        :type filter_: str
+        :param filter_: (Optional) An optional filter for the request.
+
+        :type order_by: str
+        :param order_by: (Optional) Field used to sort the returned traces.
+
+        :type page_token: str
+        :param page_token: (Optional) Opaque marker for the next "page" of
+                           entries. If not passed, the API will return the
+                           first page of entries.
+
+        :rtype: :class:`~google.cloud.iterator.Iterator`
+        :returns: Traces that match the specified filter conditions.
+        """
+        if project_id is None:
+            project_id = self.project
+
+        if start_time is not None:
+            start_time = _datetime_to_pb_timestamp(start_time)
+
+        if end_time is not None:
+            end_time = _datetime_to_pb_timestamp(end_time)
+
+        return self.trace_api.list_traces(
+            project_id=project_id,
+            view=view,
+            page_size=page_size,
+            start_time=start_time,
+            end_time=end_time,
+            filter_=filter_,
+            order_by=order_by,
+            page_token=page_token)
diff --git a/trace/google/cloud/trace_v1/__init__.py b/trace/google/cloud/trace_v1/__init__.py
new file mode 100644
index 000000000000..8d4fa6bd3c39
--- /dev/null
+++ b/trace/google/cloud/trace_v1/__init__.py
@@ -0,0 +1,25 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from google.cloud.gapic.trace.v1.trace_service_client import TraceServiceClient
+from google.cloud.gapic.trace.v1 import enums
+
+from google.cloud.trace_v1 import types
+
+
+__all__ = (
+    'enums',
+    'TraceServiceClient',
+    'types',
+)
diff --git a/trace/google/cloud/trace_v1/types.py b/trace/google/cloud/trace_v1/types.py
new file mode 100644
index 000000000000..ed6d5b585829
--- /dev/null
+++ b/trace/google/cloud/trace_v1/types.py
@@ -0,0 +1,28 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
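Editorial sketch, not part of the patch: how the handwritten Client surface above might be used. The project ID and time window are placeholders, and credentials are assumed to come from the environment.

from datetime import datetime, timedelta

from google.cloud.trace.client import Client

client = Client(project='my-project')  # 'my-project' is a placeholder

# Fetch one trace by ID; the result is a dict parsed from the Trace proto.
trace = client.get_trace(trace_id='0123456789abcdef0123456789abcdef')

# List traces collected over the last hour; the datetimes are converted to
# protobuf Timestamps by Client.list_traces before the GAX call is made.
for item in client.list_traces(
        start_time=datetime.utcnow() - timedelta(hours=1),
        end_time=datetime.utcnow()):
    print(item['traceId'])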
+ +from __future__ import absolute_import +import sys + +from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2 +from google.gax.utils.messages import get_messages + + +names = [] +for name, message in get_messages(trace_pb2).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/trace/nox.py b/trace/nox.py new file mode 100644 index 000000000000..08c69cb85df5 --- /dev/null +++ b/trace/nox.py @@ -0,0 +1,79 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import nox + + +LOCAL_DEPS = ('../core/',) + + +@nox.session +@nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) +def unit_tests(session, python_version): + """Run the unit test suite.""" + + # Run unit tests against all supported versions of Python. + session.interpreter = 'python{}'.format(python_version) + + # Install all test dependencies, then install this package in-place. + session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) + session.install('-e', '.') + + # Run py.test against the unit tests. + session.run( + 'py.test', + '--quiet', + '--cov=google.cloud.trace', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', + 'tests/', + *session.posargs + ) + + +@nox.session +def lint(session): + """Run flake8. + Returns a failure if flake8 finds linting errors or sufficiently + serious code quality issues. + """ + session.interpreter = 'python3.6' + session.install('flake8', *LOCAL_DEPS) + session.install('.') + session.run('flake8', 'google/cloud/trace') + + +@nox.session +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.interpreter = 'python3.6' + session.install('docutils', 'pygments') + session.run( + 'python', 'setup.py', 'check', '--restructuredtext', '--strict') + + +@nox.session +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.interpreter = 'python3.6' + session.install('coverage', 'pytest-cov') + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') diff --git a/trace/setup.py b/trace/setup.py new file mode 100644 index 000000000000..aeeae31756e4 --- /dev/null +++ b/trace/setup.py @@ -0,0 +1,44 @@ +"""A setup module for the GAPIC Stackdriver Trace API library. 
+
+See:
+https://packaging.python.org/en/latest/distributing.html
+https://github.com/pypa/sampleproject
+"""
+
+from setuptools import setup, find_packages
+
+install_requires = [
+    'google-gax>=0.15.7, <0.16dev',
+    'googleapis-common-protos[grpc]>=1.5.2, <2.0dev',
+    'google-cloud-core >= 0.24.0, < 0.25dev',
+]
+
+setup(
+    name='google-cloud-trace',
+    version='0.15.4',
+    author='Google Inc',
+    author_email='googleapis-packages@google.com',
+    classifiers=[
+        'Intended Audience :: Developers',
+        'Development Status :: 3 - Alpha',
+        'License :: OSI Approved :: Apache Software License',
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: Implementation :: CPython',
+    ],
+    description='GAPIC library for the Stackdriver Trace API',
+    include_package_data=True,
+    long_description=open('README.rst').read(),
+    install_requires=install_requires,
+    license='Apache-2.0',
+    packages=find_packages(),
+    namespace_packages=[
+        'google', 'google.cloud', 'google.cloud.gapic',
+        'google.cloud.gapic.trace'
+    ],
+    url='https://github.com/googleapis/googleapis')
diff --git a/trace/tests/__init__.py b/trace/tests/__init__.py
new file mode 100644
index 000000000000..7c07b241f066
--- /dev/null
+++ b/trace/tests/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/trace/tests/gapic/v1/test_trace_service_client_v1.py b/trace/tests/gapic/v1/test_trace_service_client_v1.py
new file mode 100644
index 000000000000..c9ca3b6d1629
--- /dev/null
+++ b/trace/tests/gapic/v1/test_trace_service_client_v1.py
@@ -0,0 +1,177 @@
+# Copyright 2017, Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
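Editorial sketch, not part of the patch: patch_traces (on both _TraceAPI and Client) accepts a plain mapping in the JSON form of the Traces proto, which _traces_mapping_to_pb converts via ParseDict. The unit tests below construct the same shape; all IDs here are placeholders.

# Field names follow the JSON (lowerCamelCase) form of the Trace proto, as
# produced by MessageToDict and accepted by ParseDict.
traces = {
    'traces': [
        {
            'projectId': 'my-project',
            'traceId': '0123456789abcdef0123456789abcdef',
            'spans': [
                {
                    'spanId': 1234,
                    'name': 'example-span',
                    'startTime': '2017-06-24T00:12:50.369990Z',
                    'endTime': '2017-06-24T00:13:39.633255Z',
                },
            ],
        },
    ],
}

# client.patch_traces(traces) would convert this mapping to a Traces proto
# and send it to the API.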
+"""Unit tests.""" + +import mock +import unittest + +from google.gax import errors + +from google.cloud.gapic.trace.v1 import trace_service_client +from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2 +from google.protobuf import empty_pb2 + + +class CustomException(Exception): + pass + + +class TestTraceServiceClient(unittest.TestCase): + @mock.patch('google.gax.config.create_stub', spec=True) + def test_patch_traces(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = trace_service_client.TraceServiceClient() + + # Mock request + project_id = 'projectId-1969970175' + traces = trace_pb2.Traces() + + client.patch_traces(project_id, traces) + + grpc_stub.PatchTraces.assert_called_once() + args, kwargs = grpc_stub.PatchTraces.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = trace_pb2.PatchTracesRequest( + project_id=project_id, traces=traces) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_patch_traces_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = trace_service_client.TraceServiceClient() + + # Mock request + project_id = 'projectId-1969970175' + traces = trace_pb2.Traces() + + # Mock exception response + grpc_stub.PatchTraces.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.patch_traces, project_id, + traces) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_get_trace(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = trace_service_client.TraceServiceClient() + + # Mock request + project_id = 'projectId-1969970175' + trace_id = 'traceId1270300245' + + # Mock response + project_id_2 = 'projectId2939242356' + trace_id_2 = 'traceId2987826376' + expected_response = trace_pb2.Trace( + project_id=project_id_2, trace_id=trace_id_2) + grpc_stub.GetTrace.return_value = expected_response + + response = client.get_trace(project_id, trace_id) + self.assertEqual(expected_response, response) + + grpc_stub.GetTrace.assert_called_once() + args, kwargs = grpc_stub.GetTrace.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = trace_pb2.GetTraceRequest( + project_id=project_id, trace_id=trace_id) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_get_trace_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = trace_service_client.TraceServiceClient() + + # Mock request + project_id = 'projectId-1969970175' + trace_id = 'traceId1270300245' + + # Mock exception response + grpc_stub.GetTrace.side_effect = CustomException() + + self.assertRaises(errors.GaxError, client.get_trace, project_id, + trace_id) + + @mock.patch('google.gax.config.create_stub', spec=True) + def test_list_traces(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = 
trace_service_client.TraceServiceClient() + + # Mock request + project_id = 'projectId-1969970175' + + # Mock response + next_page_token = '' + traces_element = trace_pb2.Trace() + traces = [traces_element] + expected_response = trace_pb2.ListTracesResponse( + next_page_token=next_page_token, traces=traces) + grpc_stub.ListTraces.return_value = expected_response + + paged_list_response = client.list_traces(project_id) + resources = list(paged_list_response) + self.assertEqual(1, len(resources)) + self.assertEqual(expected_response.traces[0], resources[0]) + + grpc_stub.ListTraces.assert_called_once() + args, kwargs = grpc_stub.ListTraces.call_args + self.assertEqual(len(args), 2) + self.assertEqual(len(kwargs), 1) + self.assertIn('metadata', kwargs) + actual_request = args[0] + + expected_request = trace_pb2.ListTracesRequest(project_id=project_id) + self.assertEqual(expected_request, actual_request) + + @mock.patch('google.gax.config.API_ERRORS', (CustomException, )) + @mock.patch('google.gax.config.create_stub', spec=True) + def test_list_traces_exception(self, mock_create_stub): + # Mock gRPC layer + grpc_stub = mock.Mock() + mock_create_stub.return_value = grpc_stub + + client = trace_service_client.TraceServiceClient() + + # Mock request + project_id = 'projectId-1969970175' + + # Mock exception response + grpc_stub.ListTraces.side_effect = CustomException() + + paged_list_response = client.list_traces(project_id) + self.assertRaises(errors.GaxError, list, paged_list_response) diff --git a/trace/tests/unit/test__gax.py b/trace/tests/unit/test__gax.py new file mode 100644 index 000000000000..3f950021b85e --- /dev/null +++ b/trace/tests/unit/test__gax.py @@ -0,0 +1,429 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
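Editorial sketch, not part of the patch: the tests below pin down the MessageToDict behaviour that _parse_trace_pb wraps. A compact view of that round-trip, with placeholder values:

from google.cloud.proto.devtools.cloudtrace.v1 import trace_pb2
from google.protobuf.json_format import MessageToDict

trace_pb = trace_pb2.Trace(
    project_id='my-project',
    trace_id='0123456789abcdef0123456789abcdef',
    spans=[trace_pb2.TraceSpan(span_id=1234, name='example-span')],
)

mapping = MessageToDict(trace_pb)
# Proto field names come back in lowerCamelCase, and 64-bit span IDs are
# rendered as strings, e.g.:
# {'projectId': 'my-project', 'traceId': '0123...', 
#  'spans': [{'spanId': '1234', 'name': 'example-span'}]}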
+ +import unittest + +import mock + +from google.cloud._testing import _GAXBaseAPI + + +class _Base(object): + project = 'PROJECT' + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + +class Test__TraceAPI(_Base, unittest.TestCase): + + @staticmethod + def _get_target_class(): + from google.cloud.trace._gax import _TraceAPI + + return _TraceAPI + + def test_constructor(self): + gax_api = object() + client = object() + api = self._make_one(gax_api, client) + self.assertIs(api._gax_api, gax_api) + self.assertIs(api.client, client) + + def test_patch_traces(self): + from google.cloud.gapic.trace.v1 import trace_service_client + from google.cloud.proto.devtools.cloudtrace.v1.trace_pb2 import ( + TraceSpan, Trace, Traces) + from google.cloud.trace._gax import _traces_mapping_to_pb + from google.cloud._helpers import _datetime_to_pb_timestamp + + from datetime import datetime + + trace_id = 'test_trace_id' + span_id = 1234 + span_name = 'test_span_name' + start_time = datetime.utcnow() + end_time = datetime.utcnow() + + traces = { + 'traces': [ + { + 'projectId': self.project, + 'traceId': trace_id, + 'spans': [ + { + 'spanId': span_id, + 'name': span_name, + 'startTime': start_time.isoformat() + 'Z', + 'endTime': end_time.isoformat() + 'Z', + }, + ], + }, + ], + } + + traces_pb = _traces_mapping_to_pb(traces) + + gax_api = mock.Mock(spec=trace_service_client.TraceServiceClient) + api = self._make_one(gax_api, None) + api.patch_traces(project_id=self.project, traces=traces) + + gax_api.patch_traces.assert_called_with(self.project, traces_pb, None) + + call_args = gax_api.patch_traces.call_args[0] + self.assertEqual(len(call_args), 3) + traces_called = call_args[1] + self.assertEqual(len(traces_called.traces), 1) + trace = traces_called.traces[0] + + self.assertEqual(len(trace.spans), 1) + span = trace.spans[0] + + self.assertIsInstance(traces_called, Traces) + self.assertEqual(trace.project_id, self.project) + self.assertEqual(trace.trace_id, trace_id) + self.assertIsInstance(trace, Trace) + + self.assertEqual(span.span_id, span_id) + self.assertEqual(span.name, span_name) + self.assertEqual( + span.start_time, + _datetime_to_pb_timestamp(start_time)) + self.assertEqual( + span.end_time, + _datetime_to_pb_timestamp(end_time)) + self.assertIsInstance(span, TraceSpan) + + def test_get_trace(self): + from google.cloud.gapic.trace.v1 import trace_service_client + + trace_id = 'test_trace_id' + + gax_api = mock.Mock(spec=trace_service_client.TraceServiceClient) + api = self._make_one(gax_api, None) + patch = mock.patch('google.cloud.trace._gax._parse_trace_pb', + return_value='fake_pb_result') + + with patch: + api.get_trace(project_id=self.project, trace_id=trace_id) + + gax_api.get_trace.assert_called_with(self.project, trace_id, None) + + def _make_trace_pb( + self, + project, + trace_id, + span_id, + span_name, + start_time, + end_time, + parent_span_id, + labels): + from google.cloud.trace._gax import _traces_mapping_to_pb + + span_kind = 2 + + traces = { + 'traces': [ + { + 'projectId': project, + 'traceId': trace_id, + 'spans': [ + { + 'spanId': span_id, + 'name': span_name, + 'startTime': start_time, + 'endTime': end_time, + 'kind': span_kind, + 'parentSpanId': parent_span_id, + 'labels': labels, + }, + ], + }, + ], + } + + traces_pb = _traces_mapping_to_pb(traces) + trace_pb = traces_pb.traces + return trace_pb + + def test_list_traces_no_paging(self): + from google.cloud._testing import _GAXPageIterator + from google.cloud.gapic.trace.v1 import 
trace_service_client + from google.cloud.gapic.trace.v1.enums import ListTracesRequest as Enum + from google.gax import INITIAL_PAGE + + from datetime import datetime + + trace_id = 'test_trace_id' + span_id = 1234 + span_name = 'test_span_name' + span_kind = 'RPC_CLIENT' + parent_span_id = 123 + start_time = datetime.utcnow().isoformat() + 'Z' + end_time = datetime.utcnow().isoformat() + 'Z' + labels = { + '/http/status_code': '200', + '/component': 'HTTP load balancer', + } + size = 10 + view_type = Enum.ViewType.COMPLETE + + trace_pb = self._make_trace_pb( + self.project, + trace_id, + span_id, + span_name, + start_time, + end_time, + parent_span_id, + labels) + + response = _GAXPageIterator(trace_pb) + gax_api = mock.Mock(spec=trace_service_client.TraceServiceClient) + gax_api.list_traces.return_value = response + api = self._make_one(gax_api, None) + + iterator = api.list_traces( + project_id=self.project, + view=view_type, + page_size=size) + + traces = list(iterator) + + self.assertEqual(len(traces), 1) + trace = traces[0] + + self.assertEqual(len(trace['spans']), 1) + span = trace['spans'][0] + + self.assertEqual(trace['projectId'], self.project) + self.assertEqual(trace['traceId'], trace_id) + + self.assertEqual(span['spanId'], str(span_id)) + self.assertEqual(span['name'], span_name) + + self.assertEqual( + span['startTime'], start_time) + self.assertEqual( + span['endTime'], end_time) + self.assertEqual(span['kind'], span_kind) + self.assertEqual(span['parentSpanId'], str(parent_span_id)) + self.assertEqual(span['labels'], labels) + + call_args = gax_api.list_traces.call_args[1] + + self.assertEqual(call_args['project_id'], self.project) + self.assertEqual(call_args['view'], view_type) + self.assertEqual(call_args['page_size'], size) + self.assertIsNone(call_args['start_time']) + self.assertIsNone(call_args['end_time']) + self.assertIsNone(call_args['filter_']) + self.assertIsNone(call_args['order_by']) + self.assertEqual(call_args['options'].page_token, INITIAL_PAGE) + + def test_list_traces_with_paging(self): + from google.cloud._testing import _GAXPageIterator + from google.cloud.gapic.trace.v1 import trace_service_client + from google.cloud.gapic.trace.v1.enums import ListTracesRequest as Enum + + from datetime import datetime + + trace_id = 'test_trace_id' + span_id = 1234 + span_name = 'test_span_name' + span_kind = 'RPC_CLIENT' + parent_span_id = 123 + start_time = datetime.utcnow().isoformat() + 'Z' + end_time = datetime.utcnow().isoformat() + 'Z' + labels = { + '/http/status_code': '200', + '/component': 'HTTP load balancer', + } + size = 10 + view_type = Enum.ViewType.COMPLETE + token = 'TOKEN' + + trace_pb = self._make_trace_pb( + self.project, + trace_id, + span_id, + span_name, + start_time, + end_time, + parent_span_id, + labels) + + response = _GAXPageIterator(trace_pb) + gax_api = mock.Mock(spec=trace_service_client.TraceServiceClient) + gax_api.list_traces.return_value = response + api = self._make_one(gax_api, None) + + iterator = api.list_traces( + project_id=self.project, + view=view_type, + page_size=size, + page_token=token) + + traces = list(iterator) + + self.assertEqual(len(traces), 1) + trace = traces[0] + + self.assertEqual(len(trace['spans']), 1) + span = trace['spans'][0] + + self.assertEqual(trace['projectId'], self.project) + self.assertEqual(trace['traceId'], trace_id) + + self.assertEqual(span['spanId'], str(span_id)) + self.assertEqual(span['name'], span_name) + + self.assertEqual( + span['startTime'], start_time) + self.assertEqual( + 
span['endTime'], end_time) + self.assertEqual(span['kind'], span_kind) + self.assertEqual(span['parentSpanId'], str(parent_span_id)) + self.assertEqual(span['labels'], labels) + + call_args = gax_api.list_traces.call_args[1] + + self.assertEqual(call_args['project_id'], self.project) + self.assertEqual(call_args['view'], view_type) + self.assertEqual(call_args['page_size'], size) + self.assertIsNone(call_args['start_time']) + self.assertIsNone(call_args['end_time']) + self.assertIsNone(call_args['filter_']) + self.assertIsNone(call_args['order_by']) + self.assertEqual(call_args['options'].page_token, token) + + +class Test__parse_trace_pb(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.trace._gax import _parse_trace_pb + + return _parse_trace_pb(*args, **kwargs) + + def test_registered_type(self): + from google.cloud.proto.devtools.cloudtrace.v1.trace_pb2 import ( + TraceSpan, Trace) + from google.protobuf.timestamp_pb2 import Timestamp + + project = u'PROJECT' + trace_id = u'test_trace_id' + span_id = 1234 + span_name = u'test_span_name' + start_time = '2017-06-24T00:12:50.369990Z' + end_time = '2017-06-24T00:13:39.633255Z' + start_seconds = 1498263170 + start_nanos = 369990000 + end_seconds = 1498263219 + end_nanos = 633255000 + + start_time_pb = Timestamp(seconds=start_seconds, nanos=start_nanos) + end_time_pb = Timestamp(seconds=end_seconds, nanos=end_nanos) + + span_pb = TraceSpan( + span_id=span_id, + name=span_name, + start_time=start_time_pb, + end_time=end_time_pb) + + trace_pb = Trace( + project_id=project, + trace_id=trace_id, + spans=[span_pb]) + + parse_result = self._call_fut(trace_pb) + + expected_result = { + 'projectId': project, + 'traceId': trace_id, + 'spans': [ + { + 'spanId': str(span_id), + 'name': span_name, + 'startTime': start_time, + 'endTime': end_time, + }, + ], + } + + self.assertEqual(parse_result, expected_result) + + @mock.patch('google.cloud.trace._gax.MessageToDict', + side_effect=TypeError) + def test_unregistered_type(self, msg_to_dict_mock): + trace_pb = mock.Mock(spec=['HasField']) + trace_pb.HasField.return_value = False + with self.assertRaises(TypeError): + self._call_fut(trace_pb) + + +class Test_make_gax_trace_api(unittest.TestCase): + + def _call_fut(self, client): + from google.cloud.trace._gax import make_gax_trace_api + + return make_gax_trace_api(client) + + def test_it(self): + from google.cloud.trace._gax import _TraceAPI + from google.cloud._http import DEFAULT_USER_AGENT + + credentials = object() + client = mock.Mock(_credentials=credentials, spec=['_credentials']) + channels = [] + channel_args = [] + generated_api_kwargs = [] + channel_obj = object() + generated = object() + + def make_channel(*args): + channel_args.append(args) + return channel_obj + + def generated_api(channel=None, **kwargs): + channels.append(channel) + generated_api_kwargs.append(kwargs) + return generated + + host = 'foo.apis.invalid' + generated_api.SERVICE_ADDRESS = host + + patch_channel = mock.patch( + 'google.cloud.trace._gax.make_secure_channel', + new=make_channel) + + patch_api = mock.patch( + 'google.cloud.trace._gax.trace_service_client.TraceServiceClient', + new=generated_api) + + with patch_api: + with patch_channel: + trace_api = self._call_fut(client) + + self.assertEqual(channels, [channel_obj]) + self.assertEqual(channel_args, + [(credentials, DEFAULT_USER_AGENT, host)]) + + self.assertEqual(len(generated_api_kwargs), 1) + self.assertEqual(generated_api_kwargs[0]['lib_name'], 'gccl') + + 
self.assertIsInstance(trace_api, _TraceAPI) + self.assertIs(trace_api._gax_api, generated) + self.assertIs(trace_api.client, client) diff --git a/trace/tests/unit/test_client.py b/trace/tests/unit/test_client.py new file mode 100644 index 000000000000..62cc8ad8e9f2 --- /dev/null +++ b/trace/tests/unit/test_client.py @@ -0,0 +1,252 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import mock + + +def _make_credentials(): + import google.auth.credentials + + return mock.Mock(spec=google.auth.credentials.Credentials) + + +class TestClient(unittest.TestCase): + + project = 'PROJECT' + + @staticmethod + def _get_target_class(): + from google.cloud.trace.client import Client + + return Client + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_constructor(self): + credentials = _make_credentials() + client = self._make_one(project=self.project, credentials=credentials) + self.assertEqual(client.project, self.project) + + def test_trace_api(self): + clients = [] + api_obj = object() + + def make_api(client_obj): + clients.append(client_obj) + return api_obj + + credentials = _make_credentials() + client = self._make_one(project=self.project, credentials=credentials) + + patch = mock.patch( + 'google.cloud.trace.client.make_gax_trace_api', + new=make_api) + + with patch: + api = client.trace_api + + self.assertIs(api, api_obj) + self.assertEqual(clients, [client]) + + def test_patch_traces_default(self): + from google.cloud.trace._gax import _TraceAPI + + credentials = _make_credentials() + client = self._make_one(project=self.project, credentials=credentials) + traces = 'fake_traces_for_test' + + mock_trace_api = mock.Mock(spec=_TraceAPI) + mock_trace_api.patch_traces = mock.Mock() + patch = mock.patch('google.cloud.trace.client.make_gax_trace_api', return_value=mock_trace_api) + + with patch: + client.patch_traces(traces=traces) + + mock_trace_api.patch_traces.assert_called_with( + options=None, + project_id='PROJECT', + traces='fake_traces_for_test') + + def test_patch_traces_explicit(self): + from google.cloud.trace._gax import _TraceAPI + + credentials = _make_credentials() + client = self._make_one(project=self.project, credentials=credentials) + traces = 'fake_traces_for_test' + + mock_trace_api = mock.Mock(spec=_TraceAPI) + mock_trace_api.patch_traces = mock.Mock() + patch = mock.patch('google.cloud.trace.client.make_gax_trace_api', return_value=mock_trace_api) + + with patch: + client.patch_traces( + project_id=self.project, + traces=traces) + + mock_trace_api.patch_traces.assert_called_with( + options=None, + project_id='PROJECT', + traces='fake_traces_for_test') + + def test_get_trace_default(self): + from google.cloud.trace._gax import _TraceAPI + + def get_trace(trace_id, project_id=None, options=None): + _get_trace_called_with = (trace_id, project_id, options) + return _get_trace_called_with + + credentials = _make_credentials() + client = self._make_one(project=self.project, 
credentials=credentials) + trace_id = '5e6e73b4131303cb6f5c9dfbaf104e33' + + mock_trace_api = mock.Mock(spec=_TraceAPI) + mock_trace_api.get_trace = get_trace + patch = mock.patch('google.cloud.trace.client.make_gax_trace_api', + return_value=mock_trace_api) + + with patch: + get_trace_called_with = client.get_trace(trace_id=trace_id) + + self.assertEqual(get_trace_called_with, + (trace_id, self.project, None)) + + def test_get_trace_explicit(self): + from google.cloud.trace._gax import _TraceAPI + + def get_trace(trace_id, project_id=None, options=None): + _get_trace_called_with = (trace_id, project_id, options) + return _get_trace_called_with + + credentials = _make_credentials() + client = self._make_one(project=self.project, credentials=credentials) + trace_id = '5e6e73b4131303cb6f5c9dfbaf104e33' + + mock_trace_api = mock.Mock(spec=_TraceAPI) + mock_trace_api.get_trace = get_trace + patch = mock.patch('google.cloud.trace.client.make_gax_trace_api', + return_value=mock_trace_api) + + with patch: + get_trace_called_with = client.get_trace( + trace_id=trace_id, + project_id=self.project) + + self.assertEqual(get_trace_called_with, + (trace_id, self.project, None)) + + def test_list_traces_default(self): + from google.cloud.trace._gax import _TraceAPI + + def list_traces( + project_id, + view=None, + page_size=None, + start_time=None, + end_time=None, + filter_=None, + order_by=None, + page_token=None): + _list_traces_called_with = ( + project_id, + view, + page_size, + start_time, + end_time, + filter_, + order_by, + page_token) + return _list_traces_called_with + + credentials = _make_credentials() + client = self._make_one(project=self.project, credentials=credentials) + + mock_trace_api = mock.Mock(spec=_TraceAPI) + mock_trace_api.list_traces = list_traces + patch = mock.patch('google.cloud.trace.client.make_gax_trace_api', + return_value=mock_trace_api) + + with patch: + list_traces_called_with = client.list_traces() + + self.assertEqual(list_traces_called_with, ( + self.project, + None, None, None, None, None, None, None)) + + def test_list_traces_explicit(self): + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud.gapic.trace.v1.enums import ListTracesRequest as Enum + from google.cloud.trace._gax import _TraceAPI + + from datetime import datetime + + def list_traces( + project_id, + view=None, + page_size=None, + start_time=None, + end_time=None, + filter_=None, + order_by=None, + page_token=None): + _list_traces_called_with = ( + project_id, + view, + page_size, + start_time, + end_time, + filter_, + order_by, + page_token) + return _list_traces_called_with + + credentials = _make_credentials() + client = self._make_one(project=self.project, credentials=credentials) + + mock_trace_api = mock.Mock(spec=_TraceAPI) + mock_trace_api.list_traces = list_traces + patch = mock.patch('google.cloud.trace.client.make_gax_trace_api', + return_value=mock_trace_api) + + view = Enum.ViewType.COMPLETE + page_size = 10 + start_time = datetime.utcnow() + end_time = datetime.utcnow() + filter_ = '+span:span1' + order_by = 'traceId' + page_token = 'TOKEN' + + + with patch: + list_traces_called_with = client.list_traces( + project_id=self.project, + view=view, + page_size=page_size, + start_time=start_time, + end_time=end_time, + filter_=filter_, + order_by=order_by, + page_token=page_token) + + self.assertEqual(list_traces_called_with, ( + self.project, + view, + page_size, + _datetime_to_pb_timestamp(start_time), + _datetime_to_pb_timestamp(end_time), + filter_, + 
order_by,
+            page_token))

From 969921bf87d06a816035dffc298f45dcdc9431e8 Mon Sep 17 00:00:00 2001
From: Danny Hermes <daniel.j.hermes@gmail.com>
Date: Wed, 26 Jul 2017 14:43:54 -0700
Subject: [PATCH 59/62] Simplifying Client constructors for Bigtable and Spanner. (#3672)

* Simplifying Client constructors for Bigtable and Spanner.

* Fixing Bigtable unit tests after Client re-factor. Also slightly
  changing the Client constructor so that it calls `with_scopes()` only
  once on the credentials (it was previously called with `SCOPE=None`
  and then again with the custom scope for the instance).

* Fixing Spanner unit tests after Client re-factor. Also slightly
  changing the `copy()` method so that it just passes the same
  credentials instance, and updating the `nox` config to allow session
  `posargs`.

* Removing unused imports after Bigtable/Spanner Client re-factor.
---
 bigtable/google/cloud/bigtable/client.py |  51 +--
 bigtable/nox.py                          |  13 +-
 bigtable/tests/unit/test_client.py       | 386 ++++++++++++-----------
 spanner/google/cloud/spanner/client.py   |  45 ++-
 spanner/nox.py                           |  13 +-
 spanner/tests/unit/test_client.py        |  56 ++--
 6 files changed, 297 insertions(+), 267 deletions(-)

diff --git a/bigtable/google/cloud/bigtable/client.py b/bigtable/google/cloud/bigtable/client.py
index 62877371a945..de6d0768266f 100644
--- a/bigtable/google/cloud/bigtable/client.py
+++ b/bigtable/google/cloud/bigtable/client.py
@@ -31,16 +31,13 @@

 import os

-import google.auth
-import google.auth.credentials
 from google.gax.utils import metrics
 from google.longrunning import operations_grpc

 from google.cloud._helpers import make_insecure_stub
 from google.cloud._helpers import make_secure_stub
 from google.cloud._http import DEFAULT_USER_AGENT
-from google.cloud.client import _ClientFactoryMixin
-from google.cloud.client import _ClientProjectMixin
+from google.cloud.client import ClientWithProject
 from google.cloud.environment_vars import BIGTABLE_EMULATOR

 from google.cloud.bigtable import __version__
@@ -166,13 +163,13 @@ def _make_table_stub(client):
             client.emulator_host)


-class Client(_ClientFactoryMixin, _ClientProjectMixin):
+class Client(ClientWithProject):
     """Client for interacting with Google Cloud Bigtable API.

     .. note::

         Since the Cloud Bigtable API requires the gRPC transport, no
-        ``http`` argument is accepted by this class.
+        ``_http`` argument is accepted by this class.

     :type project: :class:`str` or :func:`unicode <unicode>`
     :param project: (Optional) The ID of the project which owns the
@@ -209,31 +206,21 @@ class Client(_ClientFactoryMixin, _ClientProjectMixin):

     def __init__(self, project=None, credentials=None,
                  read_only=False, admin=False,
                  user_agent=DEFAULT_USER_AGENT):
-        _ClientProjectMixin.__init__(self, project=project)
-        if credentials is None:
-            credentials, _ = google.auth.default()
-
         if read_only and admin:
             raise ValueError('A read-only client cannot also perform '
                              'administrative actions.')

-        scopes = []
-        if read_only:
-            scopes.append(READ_ONLY_SCOPE)
-        else:
-            scopes.append(DATA_SCOPE)
-
+        # NOTE: We set the scopes **before** calling the parent constructor.
+        #       It **may** use those scopes in ``with_scopes_if_required``.
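The scope selection that this refactor centralizes in `_get_scopes()` can be summarized with a small standalone sketch; the scope URL strings below are assumed values for the named constants, not taken from this patch:

# A minimal sketch of the scope-selection rule, not the module's code.
# The URL strings are assumptions for illustration only.
DATA_SCOPE = 'https://www.googleapis.com/auth/bigtable.data'
READ_ONLY_SCOPE = 'https://www.googleapis.com/auth/bigtable.data.readonly'
ADMIN_SCOPE = 'https://www.googleapis.com/auth/bigtable.admin'


def get_scopes(read_only=False, admin=False):
    # Read-only replaces the data scope; admin appends a second scope.
    scopes = (READ_ONLY_SCOPE,) if read_only else (DATA_SCOPE,)
    if admin:
        scopes += (ADMIN_SCOPE,)
    return scopes


assert get_scopes() == (DATA_SCOPE,)
assert get_scopes(read_only=True) == (READ_ONLY_SCOPE,)
assert get_scopes(admin=True) == (DATA_SCOPE, ADMIN_SCOPE)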
self._read_only = bool(read_only) - - if admin: - scopes.append(ADMIN_SCOPE) - self._admin = bool(admin) + self.SCOPE = self._get_scopes() - credentials = google.auth.credentials.with_scopes_if_required( - credentials, scopes) - - self._credentials = credentials + # NOTE: This API has no use for the _http argument, but sending it + # will have no impact since the _http() @property only lazily + # creates a working HTTP object. + super(Client, self).__init__( + project=project, credentials=credentials, _http=None) self.user_agent = user_agent self.emulator_host = os.getenv(BIGTABLE_EMULATOR) @@ -244,6 +231,22 @@ def __init__(self, project=None, credentials=None, self._operations_stub_internal = _make_operations_stub(self) self._table_stub_internal = _make_table_stub(self) + def _get_scopes(self): + """Get the scopes corresponding to admin / read-only state. + + Returns: + Tuple[str, ...]: The tuple of scopes. + """ + if self._read_only: + scopes = (READ_ONLY_SCOPE,) + else: + scopes = (DATA_SCOPE,) + + if self._admin: + scopes += (ADMIN_SCOPE,) + + return scopes + def copy(self): """Make a copy of this client. diff --git a/bigtable/nox.py b/bigtable/nox.py index b43e196a95ff..83b56e49d2df 100644 --- a/bigtable/nox.py +++ b/bigtable/nox.py @@ -37,10 +37,17 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', - '--cov=google.cloud.bigtable', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + session.run( + 'py.test', + '--quiet', + '--cov=google.cloud.bigtable', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', 'tests/unit', + *session.posargs ) diff --git a/bigtable/tests/unit/test_client.py b/bigtable/tests/unit/test_client.py index c3ab8d1ed888..9e0485a41554 100644 --- a/bigtable/tests/unit/test_client.py +++ b/bigtable/tests/unit/test_client.py @@ -256,170 +256,215 @@ def _get_target_class(): def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) - def _make_oneWithMocks(self, *args, **kwargs): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import client as MUT - - mock_make_data_stub = _MakeStubMock() - mock_make_instance_stub = _MakeStubMock() - mock_make_operations_stub = _MakeStubMock() - mock_make_table_stub = _MakeStubMock() - with _Monkey(MUT, _make_data_stub=mock_make_data_stub, - _make_instance_stub=mock_make_instance_stub, - _make_operations_stub=mock_make_operations_stub, - _make_table_stub=mock_make_table_stub): - return self._make_one(*args, **kwargs) - - def _constructor_test_helper(self, expected_scopes, creds, - read_only=False, admin=False, - user_agent=None, expected_creds=None): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import client as MUT - - user_agent = user_agent or MUT.DEFAULT_USER_AGENT - - mock_make_data_stub = _MakeStubMock() - mock_make_instance_stub = _MakeStubMock() - mock_make_operations_stub = _MakeStubMock() - mock_make_table_stub = _MakeStubMock() - with _Monkey(MUT, _make_data_stub=mock_make_data_stub, - _make_instance_stub=mock_make_instance_stub, - _make_operations_stub=mock_make_operations_stub, - _make_table_stub=mock_make_table_stub): - client = self._make_one(project=self.PROJECT, credentials=creds, - read_only=read_only, admin=admin, - user_agent=user_agent) - - # Verify the mocks. 
- self.assertEqual(mock_make_data_stub.calls, [client]) - if admin: - self.assertSequenceEqual(mock_make_instance_stub.calls, [client]) - self.assertSequenceEqual(mock_make_operations_stub.calls, [client]) - self.assertSequenceEqual(mock_make_table_stub.calls, [client]) - else: - self.assertSequenceEqual(mock_make_instance_stub.calls, []) - self.assertSequenceEqual(mock_make_operations_stub.calls, []) - self.assertSequenceEqual(mock_make_table_stub.calls, []) - - expected_creds = expected_creds or creds.with_scopes.return_value - self.assertIs(client._credentials, expected_creds) + @mock.patch('google.cloud.bigtable.client._make_table_stub') + @mock.patch('google.cloud.bigtable.client._make_operations_stub') + @mock.patch('google.cloud.bigtable.client._make_instance_stub') + @mock.patch('google.cloud.bigtable.client._make_data_stub') + def _make_one_with_mocks( + self, _make_data_stub, _make_instance_stub, + _make_operations_stub, _make_table_stub, + *args, **kwargs): + return self._make_one(*args, **kwargs) + + @mock.patch('google.cloud.bigtable.client._make_table_stub') + @mock.patch('google.cloud.bigtable.client._make_operations_stub') + @mock.patch('google.cloud.bigtable.client._make_instance_stub') + @mock.patch('google.cloud.bigtable.client._make_data_stub') + def test_constructor_default_scopes( + self, _make_data_stub, _make_instance_stub, + _make_operations_stub, _make_table_stub): + from google.cloud.bigtable.client import DATA_SCOPE - if expected_scopes is not None: - creds.with_scopes.assert_called_once_with(expected_scopes) + expected_scopes = (DATA_SCOPE,) + credentials = _make_credentials() + custom_user_agent = 'custom-application' + client = self._make_one( + project=self.PROJECT, credentials=credentials, + user_agent=custom_user_agent) self.assertEqual(client.project, self.PROJECT) - self.assertEqual(client.user_agent, user_agent) - # Check gRPC stubs (or mocks of them) are set - self.assertIs(client._data_stub, mock_make_data_stub.result) - if admin: - self.assertIs(client._instance_stub_internal, - mock_make_instance_stub.result) - self.assertIs(client._operations_stub_internal, - mock_make_operations_stub.result) - self.assertIs(client._table_stub_internal, - mock_make_table_stub.result) - else: - self.assertIsNone(client._instance_stub_internal) - self.assertIsNone(client._operations_stub_internal) - self.assertIsNone(client._table_stub_internal) - - def test_constructor_default_scopes(self): - from google.cloud.bigtable import client as MUT - - expected_scopes = [MUT.DATA_SCOPE] - creds = _make_credentials() - self._constructor_test_helper(expected_scopes, creds) - - def test_constructor_custom_user_agent(self): - from google.cloud.bigtable import client as MUT - - CUSTOM_USER_AGENT = 'custom-application' - expected_scopes = [MUT.DATA_SCOPE] - creds = _make_credentials() - self._constructor_test_helper(expected_scopes, creds, - user_agent=CUSTOM_USER_AGENT) - - def test_constructor_with_admin(self): - from google.cloud.bigtable import client as MUT - - expected_scopes = [MUT.DATA_SCOPE, MUT.ADMIN_SCOPE] - creds = _make_credentials() - self._constructor_test_helper(expected_scopes, creds, admin=True) + self.assertIs( + client._credentials, credentials.with_scopes.return_value) + self.assertIsNone(client._http_internal) + self.assertFalse(client._read_only) + self.assertFalse(client._admin) + self.assertEqual(client.SCOPE, expected_scopes) + self.assertEqual(client.user_agent, custom_user_agent) + self.assertIsNone(client.emulator_host) + 
self.assertIs(client._data_stub, _make_data_stub.return_value) + self.assertIsNone(client._instance_stub_internal) + self.assertIsNone(client._operations_stub_internal) + self.assertIsNone(client._table_stub_internal) + + # Check mocks. + credentials.with_scopes.assert_called_once_with(expected_scopes) + _make_data_stub.assert_called_once_with(client) + _make_instance_stub.assert_not_called() + _make_operations_stub.assert_not_called() + _make_table_stub.assert_not_called() + + @mock.patch('google.cloud.bigtable.client._make_table_stub') + @mock.patch('google.cloud.bigtable.client._make_operations_stub') + @mock.patch('google.cloud.bigtable.client._make_instance_stub') + @mock.patch('google.cloud.bigtable.client._make_data_stub') + def test_constructor_with_admin( + self, _make_data_stub, _make_instance_stub, + _make_operations_stub, _make_table_stub): + from google.cloud._http import DEFAULT_USER_AGENT + from google.cloud.bigtable.client import ADMIN_SCOPE + from google.cloud.bigtable.client import DATA_SCOPE - def test_constructor_with_read_only(self): - from google.cloud.bigtable import client as MUT + expected_scopes = (DATA_SCOPE, ADMIN_SCOPE) + credentials = _make_credentials() + client = self._make_one( + project=self.PROJECT, credentials=credentials, admin=True) - expected_scopes = [MUT.READ_ONLY_SCOPE] - creds = _make_credentials() - self._constructor_test_helper(expected_scopes, creds, read_only=True) + self.assertEqual(client.project, self.PROJECT) + self.assertIs( + client._credentials, credentials.with_scopes.return_value) + self.assertIsNone(client._http_internal) + self.assertFalse(client._read_only) + self.assertTrue(client._admin) + self.assertEqual(client.SCOPE, expected_scopes) + self.assertEqual(client.user_agent, DEFAULT_USER_AGENT) + self.assertIsNone(client.emulator_host) + self.assertIs(client._data_stub, _make_data_stub.return_value) + self.assertIs( + client._instance_stub_internal, _make_instance_stub.return_value) + self.assertIs( + client._operations_stub_internal, + _make_operations_stub.return_value) + self.assertIs( + client._table_stub_internal, _make_table_stub.return_value) + + # Check mocks. 
+ credentials.with_scopes.assert_called_once_with(expected_scopes) + _make_data_stub.assert_called_once_with(client) + _make_instance_stub.assert_called_once_with(client) + _make_operations_stub.assert_called_once_with(client) + _make_table_stub.assert_called_once_with(client) def test_constructor_both_admin_and_read_only(self): - creds = _make_credentials() + credentials = _make_credentials() with self.assertRaises(ValueError): - self._constructor_test_helper([], creds, admin=True, - read_only=True) + self._make_one( + project=self.PROJECT, credentials=credentials, + admin=True, read_only=True) - def test_constructor_implicit_credentials(self): + def test__get_scopes_default(self): from google.cloud.bigtable.client import DATA_SCOPE - creds = _make_credentials() - expected_scopes = [DATA_SCOPE] + client = self._make_one( + project=self.PROJECT, credentials=_make_credentials()) + self.assertEqual(client._get_scopes(), (DATA_SCOPE,)) - patch = mock.patch( - 'google.auth.default', return_value=(creds, None)) - with patch as default: - self._constructor_test_helper( - None, None, - expected_creds=creds.with_scopes.return_value) + def test__get_scopes_admin(self): + from google.cloud.bigtable.client import ADMIN_SCOPE + from google.cloud.bigtable.client import DATA_SCOPE - default.assert_called_once_with() - creds.with_scopes.assert_called_once_with(expected_scopes) + client = self._make_one( + project=self.PROJECT, credentials=_make_credentials(), + admin=True) + expected_scopes = (DATA_SCOPE, ADMIN_SCOPE) + self.assertEqual(client._get_scopes(), expected_scopes) + + def test__get_scopes_read_only(self): + from google.cloud.bigtable.client import READ_ONLY_SCOPE + + client = self._make_one( + project=self.PROJECT, credentials=_make_credentials(), + read_only=True) + self.assertEqual(client._get_scopes(), (READ_ONLY_SCOPE,)) + + def _copy_helper_check_stubs(self, client, new_client): + if client._admin: + # Check the instance stub. + self.assertIs( + client._instance_stub_internal, mock.sentinel.inst_stub1) + self.assertIs( + new_client._instance_stub_internal, mock.sentinel.inst_stub2) + self.assertIsNot( + new_client._instance_stub_internal, + client._instance_stub_internal) + # Check the operations stub. + self.assertIs( + client._operations_stub_internal, mock.sentinel.ops_stub1) + self.assertIs( + new_client._operations_stub_internal, mock.sentinel.ops_stub2) + self.assertIsNot( + new_client._operations_stub_internal, + client._operations_stub_internal) + # Check the table stub. + self.assertIs( + client._table_stub_internal, mock.sentinel.table_stub1) + self.assertIs( + new_client._table_stub_internal, mock.sentinel.table_stub2) + self.assertIsNot( + new_client._table_stub_internal, client._table_stub_internal) + else: + # Check the instance stub. + self.assertIsNone(client._instance_stub_internal) + self.assertIsNone(new_client._instance_stub_internal) + # Check the operations stub. + self.assertIsNone(client._operations_stub_internal) + self.assertIsNone(new_client._operations_stub_internal) + # Check the table stub. 
+ self.assertIsNone(client._table_stub_internal) + self.assertIsNone(new_client._table_stub_internal) + + @mock.patch( + 'google.cloud.bigtable.client._make_table_stub', + side_effect=[mock.sentinel.table_stub1, mock.sentinel.table_stub2], + ) + @mock.patch( + 'google.cloud.bigtable.client._make_operations_stub', + side_effect=[mock.sentinel.ops_stub1, mock.sentinel.ops_stub2], + ) + @mock.patch( + 'google.cloud.bigtable.client._make_instance_stub', + side_effect=[mock.sentinel.inst_stub1, mock.sentinel.inst_stub2], + ) + @mock.patch( + 'google.cloud.bigtable.client._make_data_stub', + side_effect=[mock.sentinel.data_stub1, mock.sentinel.data_stub2], + ) + def _copy_test_helper( + self, _make_data_stub, _make_instance_stub, + _make_operations_stub, _make_table_stub, **kwargs): + credentials = _make_credentials() + # Make sure it "already" is scoped. + credentials.requires_scopes = False - def test_constructor_credentials_wo_create_scoped(self): - creds = _make_credentials() - expected_scopes = None - self._constructor_test_helper(expected_scopes, creds) + client = self._make_one( + project=self.PROJECT, credentials=credentials, **kwargs) + self.assertIs(client._credentials, credentials) - def _copy_test_helper(self, read_only=False, admin=False): - from google.cloud._testing import _Monkey - from google.cloud.bigtable import client as MUT - - credentials = _make_credentials() - client = self._make_oneWithMocks( - project=self.PROJECT, - credentials=credentials, - read_only=read_only, - admin=admin, - user_agent=self.USER_AGENT) - # Put some fake stubs in place so that we can verify they don't - # get copied. In the admin=False case, only the data stub will - # not be None, so we over-ride all the internal values. - client._data_stub = object() - client._instance_stub_internal = object() - client._operations_stub_internal = object() - client._table_stub_internal = object() - - mock_make_data_stub = _MakeStubMock() - mock_make_instance_stub = _MakeStubMock() - mock_make_operations_stub = _MakeStubMock() - mock_make_table_stub = _MakeStubMock() - with _Monkey(MUT, _make_data_stub=mock_make_data_stub, - _make_instance_stub=mock_make_instance_stub, - _make_operations_stub=mock_make_operations_stub, - _make_table_stub=mock_make_table_stub): - new_client = client.copy() + new_client = client.copy() self.assertEqual(new_client._admin, client._admin) self.assertEqual(new_client._credentials, client._credentials) self.assertEqual(new_client.project, client.project) self.assertEqual(new_client.user_agent, client.user_agent) # Make sure stubs are not preserved. - self.assertNotEqual(new_client._data_stub, client._data_stub) - self.assertNotEqual(new_client._instance_stub_internal, - client._instance_stub_internal) - self.assertNotEqual(new_client._operations_stub_internal, - client._operations_stub_internal) - self.assertNotEqual(new_client._table_stub_internal, - client._table_stub_internal) + self.assertIs(client._data_stub, mock.sentinel.data_stub1) + self.assertIs(new_client._data_stub, mock.sentinel.data_stub2) + self.assertIsNot(new_client._data_stub, client._data_stub) + self._copy_helper_check_stubs(client, new_client) + + # Check mocks. 
+ credentials.with_scopes.assert_not_called() + stub_calls = [ + mock.call(client), + mock.call(new_client), + ] + self.assertEqual(_make_data_stub.mock_calls, stub_calls) + if client._admin: + self.assertEqual(_make_instance_stub.mock_calls, stub_calls) + self.assertEqual(_make_operations_stub.mock_calls, stub_calls) + self.assertEqual(_make_table_stub.mock_calls, stub_calls) + else: + _make_instance_stub.assert_not_called() + _make_operations_stub.assert_not_called() + _make_table_stub.assert_not_called() def test_copy(self): self._copy_test_helper() @@ -433,61 +478,61 @@ def test_copy_read_only(self): def test_credentials_getter(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials) + client = self._make_one_with_mocks( + project=project, credentials=credentials) self.assertIs(client.credentials, credentials.with_scopes.return_value) def test_project_name_property(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials) + client = self._make_one_with_mocks( + project=project, credentials=credentials) project_name = 'projects/' + project self.assertEqual(client.project_name, project_name) def test_instance_stub_getter(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=True) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=True) self.assertIs(client._instance_stub, client._instance_stub_internal) def test_instance_stub_non_admin_failure(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=False) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=False) with self.assertRaises(ValueError): getattr(client, '_instance_stub') def test_operations_stub_getter(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=True) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=True) self.assertIs(client._operations_stub, client._operations_stub_internal) def test_operations_stub_non_admin_failure(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=False) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=False) with self.assertRaises(ValueError): getattr(client, '_operations_stub') def test_table_stub_getter(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=True) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=True) self.assertIs(client._table_stub, client._table_stub_internal) def test_table_stub_non_admin_failure(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_oneWithMocks(project=project, - credentials=credentials, admin=False) + client = self._make_one_with_mocks( + project=project, credentials=credentials, admin=False) with self.assertRaises(ValueError): getattr(client, '_table_stub') @@ -501,8 +546,8 @@ def test_instance_factory_defaults(self): INSTANCE_ID = 'instance-id' DISPLAY_NAME = 'display-name' credentials = _make_credentials() - client = 
self._make_oneWithMocks(project=PROJECT, - credentials=credentials) + client = self._make_one_with_mocks( + project=PROJECT, credentials=credentials) instance = client.instance(INSTANCE_ID, display_name=DISPLAY_NAME) @@ -523,8 +568,8 @@ def test_instance_factory_w_explicit_serve_nodes(self): LOCATION_ID = 'locname' SERVE_NODES = 5 credentials = _make_credentials() - client = self._make_oneWithMocks(project=PROJECT, - credentials=credentials) + client = self._make_one_with_mocks( + project=PROJECT, credentials=credentials) instance = client.instance( INSTANCE_ID, display_name=DISPLAY_NAME, @@ -554,7 +599,7 @@ def test_list_instances(self): 'projects/' + self.PROJECT + '/instances/' + INSTANCE_ID2) credentials = _make_credentials() - client = self._make_oneWithMocks( + client = self._make_one_with_mocks( project=self.PROJECT, credentials=credentials, admin=True, @@ -609,14 +654,3 @@ def __init__(self, credentials, user_agent, emulator_host=None): self.credentials = credentials self.user_agent = user_agent self.emulator_host = emulator_host - - -class _MakeStubMock(object): - - def __init__(self): - self.result = object() - self.calls = [] - - def __call__(self, client): - self.calls.append(client) - return self.result diff --git a/spanner/google/cloud/spanner/client.py b/spanner/google/cloud/spanner/client.py index b701b017abb0..6274d28d9e18 100644 --- a/spanner/google/cloud/spanner/client.py +++ b/spanner/google/cloud/spanner/client.py @@ -24,8 +24,6 @@ :class:`~google.cloud.spanner.database.Database` """ -import google.auth -import google.auth.credentials from google.gax import INITIAL_PAGE # pylint: disable=line-too-long from google.cloud.gapic.spanner_admin_database.v1.database_admin_client import ( # noqa @@ -35,8 +33,7 @@ # pylint: enable=line-too-long from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud.client import _ClientFactoryMixin -from google.cloud.client import _ClientProjectMixin +from google.cloud.client import ClientWithProject from google.cloud.iterator import GAXIterator from google.cloud.spanner import __version__ from google.cloud.spanner._helpers import _options_with_prefix @@ -73,13 +70,13 @@ def from_pb(cls, config_pb): return cls(config_pb.name, config_pb.display_name) -class Client(_ClientFactoryMixin, _ClientProjectMixin): +class Client(ClientWithProject): """Client for interacting with Cloud Spanner API. .. note:: Since the Cloud Spanner API requires the gRPC transport, no - ``http`` argument is accepted by this class. + ``_http`` argument is accepted by this class. :type project: :class:`str` or :func:`unicode <unicode>` :param project: (Optional) The ID of the project which owns the @@ -104,21 +101,16 @@ class Client(_ClientFactoryMixin, _ClientProjectMixin): _database_admin_api = None _SET_PROJECT = True # Used by from_service_account_json() + SCOPE = (SPANNER_ADMIN_SCOPE,) + """The scopes required for Google Cloud Spanner.""" + def __init__(self, project=None, credentials=None, user_agent=DEFAULT_USER_AGENT): - - _ClientProjectMixin.__init__(self, project=project) - if credentials is None: - credentials, _ = google.auth.default() - - scopes = [ - SPANNER_ADMIN_SCOPE, - ] - - credentials = google.auth.credentials.with_scopes_if_required( - credentials, scopes) - - self._credentials = credentials + # NOTE: This API has no use for the _http argument, but sending it + # will have no impact since the _http() @property only lazily + # creates a working HTTP object. 
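With the constructor now delegating to `ClientWithProject`, typical construction and copying reduce to the following sketch; the project ID is a placeholder, and anonymous credentials stand in for real ones:

# Sketch only: 'example-project' is a placeholder. Anonymous credentials
# do not require scopes, so with_scopes_if_required() leaves them as-is.
from google.auth.credentials import AnonymousCredentials
from google.cloud import spanner

client = spanner.Client(
    project='example-project', credentials=AnonymousCredentials())
clone = client.copy()  # copy() now reuses the same credentials instance
assert clone.project == client.project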
+ super(Client, self).__init__( + project=project, credentials=credentials, _http=None) self.user_agent = user_agent @property @@ -181,19 +173,20 @@ def copy(self): :rtype: :class:`.Client` :returns: A copy of the current client. """ - credentials = self._credentials - copied_creds = credentials.create_scoped(credentials.scopes) return self.__class__( - self.project, - copied_creds, - self.user_agent, + project=self.project, + credentials=self._credentials, + user_agent=self.user_agent, ) def list_instance_configs(self, page_size=None, page_token=None): """List available instance configurations for the client's project. - See - https://cloud.google.com/spanner/reference/rpc/google.spanner.admin.database.v1#google.spanner.admin.database.v1.InstanceAdmin.ListInstanceConfigs + .. _RPC docs: https://cloud.google.com/spanner/docs/reference/rpc/\ + google.spanner.admin.instance.v1#google.spanner.admin.\ + instance.v1.InstanceAdmin.ListInstanceConfigs + + See `RPC docs`_. :type page_size: int :param page_size: (Optional) Maximum number of results to return. diff --git a/spanner/nox.py b/spanner/nox.py index 980bff46c85d..bdb2b4e4cbb6 100644 --- a/spanner/nox.py +++ b/spanner/nox.py @@ -38,10 +38,17 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. - session.run('py.test', '--quiet', - '--cov=google.cloud.spanner', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + session.run( + 'py.test', + '--quiet', + '--cov=google.cloud.spanner', + '--cov=tests.unit', + '--cov-append', + '--cov-config=.coveragerc', + '--cov-report=', + '--cov-fail-under=97', 'tests/unit', + *session.posargs ) diff --git a/spanner/tests/unit/test_client.py b/spanner/tests/unit/test_client.py index e5e90fd6b7ab..28eee9b78f56 100644 --- a/spanner/tests/unit/test_client.py +++ b/spanner/tests/unit/test_client.py @@ -15,6 +15,7 @@ import unittest import mock +import six def _make_credentials(): @@ -40,13 +41,13 @@ class TestClient(unittest.TestCase): TIMEOUT_SECONDS = 80 USER_AGENT = 'you-sir-age-int' - def _getTargetClass(self): + def _get_target_class(self): from google.cloud.spanner.client import Client return Client def _make_one(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + return self._get_target_class()(*args, **kwargs) def _constructor_test_helper(self, expected_scopes, creds, user_agent=None, @@ -70,9 +71,9 @@ def _constructor_test_helper(self, expected_scopes, creds, def test_constructor_default_scopes(self): from google.cloud.spanner import client as MUT - expected_scopes = [ + expected_scopes = ( MUT.SPANNER_ADMIN_SCOPE, - ] + ) creds = _make_credentials() self._constructor_test_helper(expected_scopes, creds) @@ -80,9 +81,9 @@ def test_constructor_custom_user_agent_and_timeout(self): from google.cloud.spanner import client as MUT CUSTOM_USER_AGENT = 'custom-application' - expected_scopes = [ + expected_scopes = ( MUT.SPANNER_ADMIN_SCOPE, - ] + ) creds = _make_credentials() self._constructor_test_helper(expected_scopes, creds, user_agent=CUSTOM_USER_AGENT) @@ -186,24 +187,27 @@ def __init__(self, *args, **kwargs): self.assertIs(api.kwargs['credentials'], client.credentials) def test_copy(self): - credentials = _Credentials('value') + credentials = _make_credentials() + # Make sure it "already" is scoped. 
+ credentials.requires_scopes = False + client = self._make_one( project=self.PROJECT, credentials=credentials, user_agent=self.USER_AGENT) new_client = client.copy() - self.assertEqual(new_client._credentials, client._credentials) + self.assertIs(new_client._credentials, client._credentials) self.assertEqual(new_client.project, client.project) self.assertEqual(new_client.user_agent, client.user_agent) def test_credentials_property(self): - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) - self.assertIs(client.credentials, credentials) + self.assertIs(client.credentials, credentials.with_scopes.return_value) def test_project_name_property(self): - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) project_name = 'projects/' + self.PROJECT self.assertEqual(client.project_name, project_name) @@ -213,7 +217,7 @@ def test_list_instance_configs_wo_paging(self): from google.gax import INITIAL_PAGE from google.cloud.spanner.client import InstanceConfig - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client.connection = object() api = client._instance_admin_api = _FauxInstanceAdminAPI() @@ -240,14 +244,13 @@ def test_list_instance_configs_wo_paging(self): [('google-cloud-resource-prefix', client.project_name)]) def test_list_instance_configs_w_paging(self): - import six from google.cloud._testing import _GAXPageIterator from google.cloud.spanner.client import InstanceConfig SIZE = 15 TOKEN_RETURNED = 'TOKEN_RETURNED' TOKEN_PASSED = 'TOKEN_PASSED' - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client.connection = object() api = client._instance_admin_api = _FauxInstanceAdminAPI() @@ -280,7 +283,7 @@ def test_instance_factory_defaults(self): from google.cloud.spanner.instance import DEFAULT_NODE_COUNT from google.cloud.spanner.instance import Instance - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) instance = client.instance(self.INSTANCE_ID) @@ -295,7 +298,7 @@ def test_instance_factory_defaults(self): def test_instance_factory_explicit(self): from google.cloud.spanner.instance import Instance - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) instance = client.instance(self.INSTANCE_ID, self.CONFIGURATION_NAME, @@ -314,7 +317,7 @@ def test_list_instances_wo_paging(self): from google.gax import INITIAL_PAGE from google.cloud.spanner.instance import Instance - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client.connection = object() api = client._instance_admin_api = _FauxInstanceAdminAPI() @@ -346,14 +349,13 @@ def test_list_instances_wo_paging(self): [('google-cloud-resource-prefix', client.project_name)]) def test_list_instances_w_paging(self): - import six from google.cloud._testing import _GAXPageIterator from google.cloud.spanner.instance import Instance SIZE = 15 TOKEN_RETURNED = 'TOKEN_RETURNED' TOKEN_PASSED = 'TOKEN_PASSED' - credentials = _Credentials() + credentials = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=credentials) client.connection = 
object() api = client._instance_admin_api = _FauxInstanceAdminAPI() @@ -389,22 +391,6 @@ def test_list_instances_w_paging(self): [('google-cloud-resource-prefix', client.project_name)]) -class _Credentials(object): - - scopes = None - - def __init__(self, access_token=None): - self._access_token = access_token - self._tokens = [] - - def create_scoped(self, scope): - self.scopes = scope - return self - - def __eq__(self, other): - return self._access_token == other._access_token - - class _FauxInstanceAdminAPI(object): def list_instance_configs(self, name, page_size, options): From 09182c87a4bd05339eb3a0861546aa58c10fd865 Mon Sep 17 00:00:00 2001 From: Tres Seaver <tseaver@palladion.com> Date: Wed, 26 Jul 2017 18:00:47 -0400 Subject: [PATCH 60/62] Implement multi-use snapshots (#3615) --- spanner/google/cloud/spanner/database.py | 70 +-- spanner/google/cloud/spanner/session.py | 31 +- spanner/google/cloud/spanner/snapshot.py | 85 +++- spanner/google/cloud/spanner/streamed.py | 12 +- spanner/google/cloud/spanner/transaction.py | 21 +- spanner/tests/system/test_system.py | 72 ++- spanner/tests/unit/test_database.py | 177 +------ spanner/tests/unit/test_session.py | 17 +- spanner/tests/unit/test_snapshot.py | 287 ++++++++++- spanner/tests/unit/test_streamed.py | 526 +++++++++++--------- spanner/tests/unit/test_transaction.py | 35 +- 11 files changed, 803 insertions(+), 530 deletions(-) diff --git a/spanner/google/cloud/spanner/database.py b/spanner/google/cloud/spanner/database.py index a449f304bf79..8df06812949d 100644 --- a/spanner/google/cloud/spanner/database.py +++ b/spanner/google/cloud/spanner/database.py @@ -380,8 +380,7 @@ def batch(self): """ return BatchCheckout(self) - def snapshot(self, read_timestamp=None, min_read_timestamp=None, - max_staleness=None, exact_staleness=None): + def snapshot(self, **kw): """Return an object which wraps a snapshot. The wrapper *must* be used as a context manager, with the snapshot @@ -390,38 +389,15 @@ def snapshot(self, read_timestamp=None, min_read_timestamp=None, See https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.TransactionOptions.ReadOnly - If no options are passed, reads will use the ``strong`` model, reading - at a timestamp where all previously committed transactions are visible. - - :type read_timestamp: :class:`datetime.datetime` - :param read_timestamp: Execute all reads at the given timestamp. - - :type min_read_timestamp: :class:`datetime.datetime` - :param min_read_timestamp: Execute all reads at a - timestamp >= ``min_read_timestamp``. - - :type max_staleness: :class:`datetime.timedelta` - :param max_staleness: Read data at a - timestamp >= NOW - ``max_staleness`` seconds. - - :type exact_staleness: :class:`datetime.timedelta` - :param exact_staleness: Execute all reads at a timestamp that is - ``exact_staleness`` old. - - :rtype: :class:`~google.cloud.spanner.snapshot.Snapshot` - :returns: a snapshot bound to this session - :raises: :exc:`ValueError` if the session has not yet been created. + :type kw: dict + :param kw: + Passed through to + :class:`~google.cloud.spanner.snapshot.Snapshot` constructor. 
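A usage sketch of this pass-through; the `database` object, table name, and columns are placeholders:

# Sketch: kwargs given to Database.snapshot() flow straight through to
# the Snapshot constructor. 'database', 'citizens', and the columns are
# illustrative placeholders.
import datetime

from google.cloud.spanner import KeySet

keyset = KeySet(all_=True)
staleness = datetime.timedelta(seconds=15)
with database.snapshot(exact_staleness=staleness) as snapshot:
    rows = list(snapshot.read('citizens', ('email', 'first_name'), keyset))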
        :rtype: :class:`~google.cloud.spanner.database.SnapshotCheckout`
        :returns: new wrapper
        """
-        return SnapshotCheckout(
-            self,
-            read_timestamp=read_timestamp,
-            min_read_timestamp=min_read_timestamp,
-            max_staleness=max_staleness,
-            exact_staleness=exact_staleness,
-        )
+        return SnapshotCheckout(self, **kw)


 class BatchCheckout(object):
@@ -467,40 +443,20 @@ class SnapshotCheckout(object):
     :type database: :class:`~google.cloud.spanner.database.Database`
     :param database: database to use

-    :type read_timestamp: :class:`datetime.datetime`
-    :param read_timestamp: Execute all reads at the given timestamp.
-
-    :type min_read_timestamp: :class:`datetime.datetime`
-    :param min_read_timestamp: Execute all reads at a
-                               timestamp >= ``min_read_timestamp``.
-
-    :type max_staleness: :class:`datetime.timedelta`
-    :param max_staleness: Read data at a
-                          timestamp >= NOW - ``max_staleness`` seconds.
-
-    :type exact_staleness: :class:`datetime.timedelta`
-    :param exact_staleness: Execute all reads at a timestamp that is
-                            ``exact_staleness`` old.
+    :type kw: dict
+    :param kw:
+        Passed through to
+        :class:`~google.cloud.spanner.snapshot.Snapshot` constructor.
     """
-    def __init__(self, database, read_timestamp=None, min_read_timestamp=None,
-                 max_staleness=None, exact_staleness=None):
+    def __init__(self, database, **kw):
         self._database = database
         self._session = None
-        self._read_timestamp = read_timestamp
-        self._min_read_timestamp = min_read_timestamp
-        self._max_staleness = max_staleness
-        self._exact_staleness = exact_staleness
+        self._kw = kw

     def __enter__(self):
         """Begin ``with`` block."""
         session = self._session = self._database._pool.get()
-        return Snapshot(
-            session,
-            read_timestamp=self._read_timestamp,
-            min_read_timestamp=self._min_read_timestamp,
-            max_staleness=self._max_staleness,
-            exact_staleness=self._exact_staleness,
-        )
+        return Snapshot(session, **self._kw)

     def __exit__(self, exc_type, exc_val, exc_tb):
         """End ``with`` block."""
diff --git a/spanner/google/cloud/spanner/session.py b/spanner/google/cloud/spanner/session.py
index f25abdd6261a..19ff60de4e1b 100644
--- a/spanner/google/cloud/spanner/session.py
+++ b/spanner/google/cloud/spanner/session.py
@@ -139,30 +139,15 @@ def delete(self):
                 raise NotFound(self.name)
             raise

-    def snapshot(self, read_timestamp=None, min_read_timestamp=None,
-                 max_staleness=None, exact_staleness=None):
+    def snapshot(self, **kw):
        """Create a snapshot to perform a set of reads with shared staleness.

        See
        https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.TransactionOptions.ReadOnly

-        If no options are passed, reads will use the ``strong`` model, reading
-        at a timestamp where all previously committed transactions are visible.
-
-        :type read_timestamp: :class:`datetime.datetime`
-        :param read_timestamp: Execute all reads at the given timestamp.
-
-        :type min_read_timestamp: :class:`datetime.datetime`
-        :param min_read_timestamp: Execute all reads at a
-                                   timestamp >= ``min_read_timestamp``.
-
-        :type max_staleness: :class:`datetime.timedelta`
-        :param max_staleness: Read data at a
-                              timestamp >= NOW - ``max_staleness`` seconds.
-
-        :type exact_staleness: :class:`datetime.timedelta`
-        :param exact_staleness: Execute all reads at a timestamp that is
-                                ``exact_staleness`` old.
+        :type kw: dict
+        :param kw: Passed through to
+                   :class:`~google.cloud.spanner.snapshot.Snapshot` ctor.
:rtype: :class:`~google.cloud.spanner.snapshot.Snapshot` :returns: a snapshot bound to this session @@ -171,11 +156,7 @@ def snapshot(self, read_timestamp=None, min_read_timestamp=None, if self._session_id is None: raise ValueError("Session has not been created.") - return Snapshot(self, - read_timestamp=read_timestamp, - min_read_timestamp=min_read_timestamp, - max_staleness=max_staleness, - exact_staleness=exact_staleness) + return Snapshot(self, **kw) def read(self, table, columns, keyset, index='', limit=0, resume_token=b''): @@ -292,7 +273,7 @@ def run_in_transaction(self, func, *args, **kw): txn = self.transaction() else: txn = self._transaction - if txn._id is None: + if txn._transaction_id is None: txn.begin() try: func(txn, *args, **kw) diff --git a/spanner/google/cloud/spanner/snapshot.py b/spanner/google/cloud/spanner/snapshot.py index 05fcba63f322..e0da23f3acd9 100644 --- a/spanner/google/cloud/spanner/snapshot.py +++ b/spanner/google/cloud/spanner/snapshot.py @@ -34,6 +34,10 @@ class _SnapshotBase(_SessionWrapper): :type session: :class:`~google.cloud.spanner.session.Session` :param session: the session used to perform the commit """ + _multi_use = False + _transaction_id = None + _read_request_count = 0 + def _make_txn_selector(self): # pylint: disable=redundant-returns-doc """Helper for :meth:`read` / :meth:`execute_sql`. @@ -70,7 +74,15 @@ def read(self, table, columns, keyset, index='', limit=0, :rtype: :class:`~google.cloud.spanner.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. + :raises: ValueError for reuse of single-use snapshots, or if a + transaction ID is pending for multiple-use snapshots. """ + if self._read_request_count > 0: + if not self._multi_use: + raise ValueError("Cannot re-use single-use snapshot.") + if self._transaction_id is None: + raise ValueError("Transaction ID pending.") + database = self._session._database api = database.spanner_api options = _options_with_prefix(database.name) @@ -81,7 +93,12 @@ def read(self, table, columns, keyset, index='', limit=0, transaction=transaction, index=index, limit=limit, resume_token=resume_token, options=options) - return StreamedResultSet(iterator) + self._read_request_count += 1 + + if self._multi_use: + return StreamedResultSet(iterator, source=self) + else: + return StreamedResultSet(iterator) def execute_sql(self, sql, params=None, param_types=None, query_mode=None, resume_token=b''): @@ -109,7 +126,15 @@ def execute_sql(self, sql, params=None, param_types=None, query_mode=None, :rtype: :class:`~google.cloud.spanner.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. + :raises: ValueError for reuse of single-use snapshots, or if a + transaction ID is pending for multiple-use snapshots. 
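The effect of these guards, sketched against an already-created `session` (the session, table, and columns are placeholders):

# Sketch: a single-use snapshot permits exactly one read; a multi-use
# snapshot permits several within one read-only transaction.
# 'session' is a placeholder for an already-created Session.
from google.cloud.spanner import KeySet

keyset = KeySet(all_=True)

single = session.snapshot()
list(single.read('citizens', ('email',), keyset))  # first read: OK
# A second single.read(...) would raise
# ValueError("Cannot re-use single-use snapshot.")

multi = session.snapshot(multi_use=True)
multi.begin()  # pins the transaction ID before the first read
list(multi.read('citizens', ('email',), keyset))
list(multi.execute_sql('SELECT COUNT(*) FROM citizens'))  # same transaction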
""" + if self._read_request_count > 0: + if not self._multi_use: + raise ValueError("Cannot re-use single-use snapshot.") + if self._transaction_id is None: + raise ValueError("Transaction ID pending.") + if params is not None: if param_types is None: raise ValueError( @@ -128,7 +153,12 @@ def execute_sql(self, sql, params=None, param_types=None, query_mode=None, transaction=transaction, params=params_pb, param_types=param_types, query_mode=query_mode, resume_token=resume_token, options=options) - return StreamedResultSet(iterator) + self._read_request_count += 1 + + if self._multi_use: + return StreamedResultSet(iterator, source=self) + else: + return StreamedResultSet(iterator) class Snapshot(_SnapshotBase): @@ -157,9 +187,16 @@ class Snapshot(_SnapshotBase): :type exact_staleness: :class:`datetime.timedelta` :param exact_staleness: Execute all reads at a timestamp that is ``exact_staleness`` old. + + :type multi_use: :class:`bool` + :param multi_use: If true, multipl :meth:`read` / :meth:`execute_sql` + calls can be performed with the snapshot in the + context of a read-only transaction, used to ensure + isolation / consistency. Incompatible with + ``max_staleness`` and ``min_read_timestamp``. """ def __init__(self, session, read_timestamp=None, min_read_timestamp=None, - max_staleness=None, exact_staleness=None): + max_staleness=None, exact_staleness=None, multi_use=False): super(Snapshot, self).__init__(session) opts = [ read_timestamp, min_read_timestamp, max_staleness, exact_staleness] @@ -168,14 +205,24 @@ def __init__(self, session, read_timestamp=None, min_read_timestamp=None, if len(flagged) > 1: raise ValueError("Supply zero or one options.") + if multi_use: + if min_read_timestamp is not None or max_staleness is not None: + raise ValueError( + "'multi_use' is incompatible with " + "'min_read_timestamp' / 'max_staleness'") + self._strong = len(flagged) == 0 self._read_timestamp = read_timestamp self._min_read_timestamp = min_read_timestamp self._max_staleness = max_staleness self._exact_staleness = exact_staleness + self._multi_use = multi_use def _make_txn_selector(self): """Helper for :meth:`read`.""" + if self._transaction_id is not None: + return TransactionSelector(id=self._transaction_id) + if self._read_timestamp: key = 'read_timestamp' value = _datetime_to_pb_timestamp(self._read_timestamp) @@ -194,4 +241,34 @@ def _make_txn_selector(self): options = TransactionOptions( read_only=TransactionOptions.ReadOnly(**{key: value})) - return TransactionSelector(single_use=options) + + if self._multi_use: + return TransactionSelector(begin=options) + else: + return TransactionSelector(single_use=options) + + def begin(self): + """Begin a transaction on the database. + + :rtype: bytes + :returns: the ID for the newly-begun transaction. + :raises: ValueError if the transaction is already begun, committed, + or rolled back. 
+ """ + if not self._multi_use: + raise ValueError("Cannot call 'begin' on single-use snapshots") + + if self._transaction_id is not None: + raise ValueError("Read-only transaction already begun") + + if self._read_request_count > 0: + raise ValueError("Read-only transaction already pending") + + database = self._session._database + api = database.spanner_api + options = _options_with_prefix(database.name) + txn_selector = self._make_txn_selector() + response = api.begin_transaction( + self._session.name, txn_selector.begin, options=options) + self._transaction_id = response.id + return self._transaction_id diff --git a/spanner/google/cloud/spanner/streamed.py b/spanner/google/cloud/spanner/streamed.py index 19333844b1c1..7aa0ca43156e 100644 --- a/spanner/google/cloud/spanner/streamed.py +++ b/spanner/google/cloud/spanner/streamed.py @@ -32,8 +32,11 @@ class StreamedResultSet(object): Iterator yielding :class:`google.cloud.proto.spanner.v1.result_set_pb2.PartialResultSet` instances. + + :type source: :class:`~google.cloud.spanner.snapshot.Snapshot` + :param source: Snapshot from which the result set was fetched. """ - def __init__(self, response_iterator): + def __init__(self, response_iterator, source=None): self._response_iterator = response_iterator self._rows = [] # Fully-processed rows self._counter = 0 # Counter for processed responses @@ -42,6 +45,7 @@ def __init__(self, response_iterator): self._resume_token = None # To resume from last received PRS self._current_row = [] # Accumulated values for incomplete row self._pending_chunk = None # Incomplete value + self._source = source # Source snapshot @property def rows(self): @@ -130,7 +134,11 @@ def consume_next(self): self._resume_token = response.resume_token if self._metadata is None: # first response - self._metadata = response.metadata + metadata = self._metadata = response.metadata + + source = self._source + if source is not None and source._transaction_id is None: + source._transaction_id = metadata.transaction.id if response.HasField('stats'): # last response self._stats = response.stats diff --git a/spanner/google/cloud/spanner/transaction.py b/spanner/google/cloud/spanner/transaction.py index 7c0272d41132..598fb0c30407 100644 --- a/spanner/google/cloud/spanner/transaction.py +++ b/spanner/google/cloud/spanner/transaction.py @@ -27,11 +27,8 @@ class Transaction(_SnapshotBase, _BatchBase): """Implement read-write transaction semantics for a session.""" committed = None """Timestamp at which the transaction was successfully committed.""" - - def __init__(self, session): - super(Transaction, self).__init__(session) - self._id = None - self._rolled_back = False + _rolled_back = False + _multi_use = True def _check_state(self): """Helper for :meth:`commit` et al. @@ -39,7 +36,7 @@ def _check_state(self): :raises: :exc:`ValueError` if the object's state is invalid for making API requests. """ - if self._id is None: + if self._transaction_id is None: raise ValueError("Transaction is not begun") if self.committed is not None: @@ -56,7 +53,7 @@ def _make_txn_selector(self): :returns: a selector configured for read-write transaction semantics. """ self._check_state() - return TransactionSelector(id=self._id) + return TransactionSelector(id=self._transaction_id) def begin(self): """Begin a transaction on the database. :rtype: bytes :returns: the ID for the newly-begun transaction. :raises: ValueError if the transaction is already begun, committed, or rolled back.
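+
+ For contrast with the read-only snapshot case above, a sketch of the
+ explicit read-write lifecycle (the ``citizens`` mutation is
+ hypothetical; most callers should prefer
+ :meth:`Session.run_in_transaction`):
+
+ txn = session.transaction()
+ txn.begin() # sets _transaction_id
+ txn.insert('citizens', ['email'], [['phred@example.com']])
+ txn.commit() # or txn.rollback()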
""" - if self._id is not None: + if self._transaction_id is not None: raise ValueError("Transaction already begun") if self.committed is not None: @@ -82,8 +79,8 @@ def begin(self): read_write=TransactionOptions.ReadWrite()) response = api.begin_transaction( self._session.name, txn_options, options=options) - self._id = response.id - return self._id + self._transaction_id = response.id + return self._transaction_id def rollback(self): """Roll back a transaction on the database.""" @@ -91,7 +88,7 @@ def rollback(self): database = self._session._database api = database.spanner_api options = _options_with_prefix(database.name) - api.rollback(self._session.name, self._id, options=options) + api.rollback(self._session.name, self._transaction_id, options=options) self._rolled_back = True del self._session._transaction @@ -112,7 +109,7 @@ def commit(self): options = _options_with_prefix(database.name) response = api.commit( self._session.name, self._mutations, - transaction_id=self._id, options=options) + transaction_id=self._transaction_id, options=options) self.committed = _pb_timestamp_to_datetime( response.commit_timestamp) del self._session._transaction diff --git a/spanner/tests/system/test_system.py b/spanner/tests/system/test_system.py index b2f83ce9fa1d..f5d15d715ed5 100644 --- a/spanner/tests/system/test_system.py +++ b/spanner/tests/system/test_system.py @@ -18,6 +18,7 @@ import os import struct import threading +import time import unittest from google.cloud.proto.spanner.v1.type_pb2 import ARRAY @@ -687,6 +688,56 @@ def test_snapshot_read_w_various_staleness(self): rows = list(strong.read(self.TABLE, self.COLUMNS, self.ALL)) self._check_row_data(rows, all_data_rows) + def test_multiuse_snapshot_read_isolation_strong(self): + ROW_COUNT = 40 + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + strong = session.snapshot(multi_use=True) + + before = list(strong.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(before, all_data_rows) + + with self._db.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + after = list(strong.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(after, all_data_rows) + + def test_multiuse_snapshot_read_isolation_read_timestamp(self): + ROW_COUNT = 40 + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + read_ts = session.snapshot(read_timestamp=committed, multi_use=True) + + before = list(read_ts.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(before, all_data_rows) + + with self._db.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + after = list(read_ts.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(after, all_data_rows) + + def test_multiuse_snapshot_read_isolation_exact_staleness(self): + ROW_COUNT = 40 + + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + + time.sleep(1) + delta = datetime.timedelta(microseconds=1000) + + exact = session.snapshot(exact_staleness=delta, multi_use=True) + + before = list(exact.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(before, all_data_rows) + + with self._db.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + after = list(exact.read(self.TABLE, self.COLUMNS, self.ALL)) + self._check_row_data(after, all_data_rows) + def test_read_w_manual_consume(self): ROW_COUNT = 4000 session, committed = self._set_up_table(ROW_COUNT) @@ -778,7 +829,7 @@ def test_read_w_ranges(self): 
START = 1000 END = 2000 session, committed = self._set_up_table(ROW_COUNT) - snapshot = session.snapshot(read_timestamp=committed) + snapshot = session.snapshot(read_timestamp=committed, multi_use=True) all_data_rows = list(self._row_data(ROW_COUNT)) closed_closed = KeyRange(start_closed=[START], end_closed=[END]) @@ -836,6 +887,22 @@ def _check_sql_results(self, snapshot, sql, params, param_types, expected): sql, params=params, param_types=param_types)) self._check_row_data(rows, expected=expected) + def test_multiuse_snapshot_execute_sql_isolation_strong(self): + ROW_COUNT = 40 + SQL = 'SELECT * FROM {}'.format(self.TABLE) + session, committed = self._set_up_table(ROW_COUNT) + all_data_rows = list(self._row_data(ROW_COUNT)) + strong = session.snapshot(multi_use=True) + + before = list(strong.execute_sql(SQL)) + self._check_row_data(before, all_data_rows) + + with self._db.batch() as batch: + batch.delete(self.TABLE, self.ALL) + + after = list(strong.execute_sql(SQL)) + self._check_row_data(after, all_data_rows) + def test_execute_sql_returning_array_of_struct(self): SQL = ( "SELECT ARRAY(SELECT AS STRUCT C1, C2 " @@ -868,7 +935,8 @@ def test_execute_sql_w_query_param(self): self.ALL_TYPES_COLUMNS, self.ALL_TYPES_ROWDATA) - snapshot = session.snapshot(read_timestamp=batch.committed) + snapshot = session.snapshot( + read_timestamp=batch.committed, multi_use=True) # Cannot equality-test array values. See below for a test w/ # array of IDs. diff --git a/spanner/tests/unit/test_database.py b/spanner/tests/unit/test_database.py index 6216d8a348fd..aa1643ed7582 100644 --- a/spanner/tests/unit/test_database.py +++ b/spanner/tests/unit/test_database.py @@ -682,12 +682,9 @@ def test_snapshot_defaults(self): checkout = database.snapshot() self.assertIsInstance(checkout, SnapshotCheckout) self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) + self.assertEqual(checkout._kw, {}) - def test_snapshot_w_read_timestamp(self): + def test_snapshot_w_read_timestamp_and_multi_use(self): import datetime from google.cloud._helpers import UTC from google.cloud.spanner.database import SnapshotCheckout @@ -700,78 +697,12 @@ def test_snapshot_w_read_timestamp(self): pool.put(session) database = self._make_one(self.DATABASE_ID, instance, pool=pool) - checkout = database.snapshot(read_timestamp=now) + checkout = database.snapshot(read_timestamp=now, multi_use=True) self.assertIsInstance(checkout, SnapshotCheckout) self.assertIs(checkout._database, database) - self.assertEqual(checkout._read_timestamp, now) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) - - def test_snapshot_w_min_read_timestamp(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud.spanner.database import SnapshotCheckout - - now = datetime.datetime.utcnow().replace(tzinfo=UTC) - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - pool = _Pool() - session = _Session() - pool.put(session) - database = self._make_one(self.DATABASE_ID, instance, pool=pool) - - checkout = database.snapshot(min_read_timestamp=now) - - self.assertIsInstance(checkout, SnapshotCheckout) - self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertEqual(checkout._min_read_timestamp, now) - 
self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) - - def test_snapshot_w_max_staleness(self): - import datetime - from google.cloud.spanner.database import SnapshotCheckout - - staleness = datetime.timedelta(seconds=1, microseconds=234567) - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - pool = _Pool() - session = _Session() - pool.put(session) - database = self._make_one(self.DATABASE_ID, instance, pool=pool) - - checkout = database.snapshot(max_staleness=staleness) - - self.assertIsInstance(checkout, SnapshotCheckout) - self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertEqual(checkout._max_staleness, staleness) - self.assertIsNone(checkout._exact_staleness) - - def test_snapshot_w_exact_staleness(self): - import datetime - from google.cloud.spanner.database import SnapshotCheckout - - staleness = datetime.timedelta(seconds=1, microseconds=234567) - client = _Client() - instance = _Instance(self.INSTANCE_NAME, client=client) - pool = _Pool() - session = _Session() - pool.put(session) - database = self._make_one(self.DATABASE_ID, instance, pool=pool) - - checkout = database.snapshot(exact_staleness=staleness) - - self.assertIsInstance(checkout, SnapshotCheckout) - self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertEqual(checkout._exact_staleness, staleness) + self.assertEqual( + checkout._kw, {'read_timestamp': now, 'multi_use': True}) class TestBatchCheckout(_BaseTest): @@ -862,20 +793,18 @@ def test_ctor_defaults(self): checkout = self._make_one(database) self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) + self.assertEqual(checkout._kw, {}) with checkout as snapshot: self.assertIsNone(pool._session) self.assertIsInstance(snapshot, Snapshot) self.assertIs(snapshot._session, session) self.assertTrue(snapshot._strong) + self.assertFalse(snapshot._multi_use) self.assertIs(pool._session, session) - def test_ctor_w_read_timestamp(self): + def test_ctor_w_read_timestamp_and_multi_use(self): import datetime from google.cloud._helpers import UTC from google.cloud.spanner.snapshot import Snapshot @@ -886,99 +815,17 @@ def test_ctor_w_read_timestamp(self): pool = database._pool = _Pool() pool.put(session) - checkout = self._make_one(database, read_timestamp=now) + checkout = self._make_one(database, read_timestamp=now, multi_use=True) self.assertIs(checkout._database, database) - self.assertEqual(checkout._read_timestamp, now) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) + self.assertEqual(checkout._kw, + {'read_timestamp': now, 'multi_use': True}) with checkout as snapshot: self.assertIsNone(pool._session) self.assertIsInstance(snapshot, Snapshot) self.assertIs(snapshot._session, session) - self.assertFalse(snapshot._strong) self.assertEqual(snapshot._read_timestamp, now) - - self.assertIs(pool._session, session) - - def test_ctor_w_min_read_timestamp(self): - import datetime - from google.cloud._helpers import UTC - from google.cloud.spanner.snapshot import Snapshot - - now = 
datetime.datetime.utcnow().replace(tzinfo=UTC) - database = _Database(self.DATABASE_NAME) - session = _Session(database) - pool = database._pool = _Pool() - pool.put(session) - - checkout = self._make_one(database, min_read_timestamp=now) - self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertEqual(checkout._min_read_timestamp, now) - self.assertIsNone(checkout._max_staleness) - self.assertIsNone(checkout._exact_staleness) - - with checkout as snapshot: - self.assertIsNone(pool._session) - self.assertIsInstance(snapshot, Snapshot) - self.assertIs(snapshot._session, session) - self.assertFalse(snapshot._strong) - self.assertEqual(snapshot._min_read_timestamp, now) - - self.assertIs(pool._session, session) - - def test_ctor_w_max_staleness(self): - import datetime - from google.cloud.spanner.snapshot import Snapshot - - staleness = datetime.timedelta(seconds=1, microseconds=234567) - database = _Database(self.DATABASE_NAME) - session = _Session(database) - pool = database._pool = _Pool() - pool.put(session) - - checkout = self._make_one(database, max_staleness=staleness) - self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertEqual(checkout._max_staleness, staleness) - self.assertIsNone(checkout._exact_staleness) - - with checkout as snapshot: - self.assertIsNone(pool._session) - self.assertIsInstance(snapshot, Snapshot) - self.assertIs(snapshot._session, session) - self.assertFalse(snapshot._strong) - self.assertEqual(snapshot._max_staleness, staleness) - - self.assertIs(pool._session, session) - - def test_ctor_w_exact_staleness(self): - import datetime - from google.cloud.spanner.snapshot import Snapshot - - staleness = datetime.timedelta(seconds=1, microseconds=234567) - database = _Database(self.DATABASE_NAME) - session = _Session(database) - pool = database._pool = _Pool() - pool.put(session) - - checkout = self._make_one(database, exact_staleness=staleness) - - self.assertIs(checkout._database, database) - self.assertIsNone(checkout._read_timestamp) - self.assertIsNone(checkout._min_read_timestamp) - self.assertIsNone(checkout._max_staleness) - self.assertEqual(checkout._exact_staleness, staleness) - - with checkout as snapshot: - self.assertIsNone(pool._session) - self.assertIsInstance(snapshot, Snapshot) - self.assertIs(snapshot._session, session) - self.assertFalse(snapshot._strong) - self.assertEqual(snapshot._exact_staleness, staleness) + self.assertTrue(snapshot._multi_use) self.assertIs(pool._session, session) diff --git a/spanner/tests/unit/test_session.py b/spanner/tests/unit/test_session.py index ce9f81eccc7a..100555c8e49f 100644 --- a/spanner/tests/unit/test_session.py +++ b/spanner/tests/unit/test_session.py @@ -225,6 +225,21 @@ def test_snapshot_created(self): self.assertIsInstance(snapshot, Snapshot) self.assertIs(snapshot._session, session) self.assertTrue(snapshot._strong) + self.assertFalse(snapshot._multi_use) + + def test_snapshot_created_w_multi_use(self): + from google.cloud.spanner.snapshot import Snapshot + + database = _Database(self.DATABASE_NAME) + session = self._make_one(database) + session._session_id = 'DEADBEEF' # emulate 'session.create()' + + snapshot = session.snapshot(multi_use=True) + + self.assertIsInstance(snapshot, Snapshot) + self.assertTrue(snapshot._session is session) + self.assertTrue(snapshot._strong) + self.assertTrue(snapshot._multi_use) def test_read_not_created(self): from 
google.cloud.spanner.keyset import KeySet @@ -403,7 +418,7 @@ def test_retry_transaction_w_commit_error_txn_already_begun(self): session = self._make_one(database) session._session_id = 'DEADBEEF' begun_txn = session._transaction = Transaction(session) - begun_txn._id = b'FACEDACE' + begun_txn._transaction_id = b'FACEDACE' called_with = [] diff --git a/spanner/tests/unit/test_snapshot.py b/spanner/tests/unit/test_snapshot.py index c5213dbd6cda..4717a14c2f24 100644 --- a/spanner/tests/unit/test_snapshot.py +++ b/spanner/tests/unit/test_snapshot.py @@ -53,12 +53,19 @@ def _makeDerived(self, session): class _Derived(self._getTargetClass()): + _transaction_id = None + _multi_use = False + def _make_txn_selector(self): from google.cloud.proto.spanner.v1.transaction_pb2 import ( TransactionOptions, TransactionSelector) + if self._transaction_id: + return TransactionSelector(id=self._transaction_id) options = TransactionOptions( read_only=TransactionOptions.ReadOnly(strong=True)) + if self._multi_use: + return TransactionSelector(begin=options) return TransactionSelector(single_use=options) return _Derived(session) @@ -105,7 +112,7 @@ def test_read_grpc_error(self): self.assertEqual(options.kwargs['metadata'], [('google-cloud-resource-prefix', database.name)]) - def test_read_normal(self): + def _read_helper(self, multi_use, first=True, count=0): from google.protobuf.struct_pb2 import Struct from google.cloud.proto.spanner.v1.result_set_pb2 import ( PartialResultSet, ResultSetMetadata, ResultSetStats) @@ -116,6 +123,7 @@ def test_read_normal(self): from google.cloud.spanner.keyset import KeySet from google.cloud.spanner._helpers import _make_value_pb + TXN_ID = b'DEADBEEF' VALUES = [ [u'bharney', 31], [u'phred', 32], @@ -147,11 +155,22 @@ def test_read_normal(self): _streaming_read_response=_MockCancellableIterator(*result_sets)) session = _Session(database) derived = self._makeDerived(session) + derived._multi_use = multi_use + derived._read_request_count = count + if not first: + derived._transaction_id = TXN_ID result_set = derived.read( TABLE_NAME, COLUMNS, KEYSET, index=INDEX, limit=LIMIT, resume_token=TOKEN) + self.assertEqual(derived._read_request_count, count + 1) + + if multi_use: + self.assertIs(result_set._source, derived) + else: + self.assertIsNone(result_set._source) + result_set.consume_all() self.assertEqual(list(result_set.rows), VALUES) self.assertEqual(result_set.metadata, metadata_pb) @@ -165,13 +184,39 @@ def test_read_normal(self): self.assertEqual(columns, COLUMNS) self.assertEqual(key_set, KEYSET.to_pb()) self.assertIsInstance(transaction, TransactionSelector) - self.assertTrue(transaction.single_use.read_only.strong) + if multi_use: + if first: + self.assertTrue(transaction.begin.read_only.strong) + else: + self.assertEqual(transaction.id, TXN_ID) + else: + self.assertTrue(transaction.single_use.read_only.strong) self.assertEqual(index, INDEX) self.assertEqual(limit, LIMIT) self.assertEqual(resume_token, TOKEN) self.assertEqual(options.kwargs['metadata'], [('google-cloud-resource-prefix', database.name)]) + def test_read_wo_multi_use(self): + self._read_helper(multi_use=False) + + def test_read_wo_multi_use_w_read_request_count_gt_0(self): + with self.assertRaises(ValueError): + self._read_helper(multi_use=False, count=1) + + def test_read_w_multi_use_wo_first(self): + self._read_helper(multi_use=True, first=False) + + def test_read_w_multi_use_wo_first_w_count_gt_0(self): + self._read_helper(multi_use=True, first=False, count=1) + + def 
test_read_w_multi_use_w_first(self): + self._read_helper(multi_use=True, first=True) + + def test_read_w_multi_use_w_first_w_count_gt_0(self): + with self.assertRaises(ValueError): + self._read_helper(multi_use=True, first=True, count=1) + def test_execute_sql_grpc_error(self): from google.cloud.proto.spanner.v1.transaction_pb2 import ( TransactionSelector) @@ -208,7 +253,7 @@ def test_execute_sql_w_params_wo_param_types(self): with self.assertRaises(ValueError): derived.execute_sql(SQL_QUERY_WITH_PARAM, PARAMS) - def test_execute_sql_normal(self): + def _execute_sql_helper(self, multi_use, first=True, count=0): from google.protobuf.struct_pb2 import Struct from google.cloud.proto.spanner.v1.result_set_pb2 import ( PartialResultSet, ResultSetMetadata, ResultSetStats) @@ -218,6 +263,7 @@ def test_execute_sql_normal(self): from google.cloud.proto.spanner.v1.type_pb2 import STRING, INT64 from google.cloud.spanner._helpers import _make_value_pb + TXN_ID = b'DEADBEEF' VALUES = [ [u'bharney', u'rhubbyl', 31], [u'phred', u'phlyntstone', 32], @@ -248,11 +294,22 @@ def test_execute_sql_normal(self): _execute_streaming_sql_response=iterator) session = _Session(database) derived = self._makeDerived(session) + derived._multi_use = multi_use + derived._read_request_count = count + if not first: + derived._transaction_id = TXN_ID result_set = derived.execute_sql( SQL_QUERY_WITH_PARAM, PARAMS, PARAM_TYPES, query_mode=MODE, resume_token=TOKEN) + self.assertEqual(derived._read_request_count, count + 1) + + if multi_use: + self.assertIs(result_set._source, derived) + else: + self.assertIsNone(result_set._source) + result_set.consume_all() self.assertEqual(list(result_set.rows), VALUES) self.assertEqual(result_set.metadata, metadata_pb) @@ -264,7 +321,13 @@ def test_execute_sql_normal(self): self.assertEqual(r_session, self.SESSION_NAME) self.assertEqual(sql, SQL_QUERY_WITH_PARAM) self.assertIsInstance(transaction, TransactionSelector) - self.assertTrue(transaction.single_use.read_only.strong) + if multi_use: + if first: + self.assertTrue(transaction.begin.read_only.strong) + else: + self.assertEqual(transaction.id, TXN_ID) + else: + self.assertTrue(transaction.single_use.read_only.strong) expected_params = Struct(fields={ key: _make_value_pb(value) for (key, value) in PARAMS.items()}) self.assertEqual(params, expected_params) @@ -274,6 +337,26 @@ def test_execute_sql_normal(self): self.assertEqual(options.kwargs['metadata'], [('google-cloud-resource-prefix', database.name)]) + def test_execute_sql_wo_multi_use(self): + self._execute_sql_helper(multi_use=False) + + def test_execute_sql_wo_multi_use_w_read_request_count_gt_0(self): + with self.assertRaises(ValueError): + self._execute_sql_helper(multi_use=False, count=1) + + def test_execute_sql_w_multi_use_wo_first(self): + self._execute_sql_helper(multi_use=True, first=False) + + def test_execute_sql_w_multi_use_wo_first_w_count_gt_0(self): + self._execute_sql_helper(multi_use=True, first=False, count=1) + + def test_execute_sql_w_multi_use_w_first(self): + self._execute_sql_helper(multi_use=True, first=True) + + def test_execute_sql_w_multi_use_w_first_w_count_gt_0(self): + with self.assertRaises(ValueError): + self._execute_sql_helper(multi_use=True, first=True, count=1) + class _MockCancellableIterator(object): @@ -298,6 +381,7 @@ class TestSnapshot(unittest.TestCase): DATABASE_NAME = INSTANCE_NAME + '/databases/' + DATABASE_ID SESSION_ID = 'session-id' SESSION_NAME = DATABASE_NAME + '/sessions/' + SESSION_ID + TRANSACTION_ID = b'DEADBEEF' def 
_getTargetClass(self): from google.cloud.spanner.snapshot import Snapshot @@ -326,6 +410,7 @@ def test_ctor_defaults(self): self.assertIsNone(snapshot._min_read_timestamp) self.assertIsNone(snapshot._max_staleness) self.assertIsNone(snapshot._exact_staleness) + self.assertFalse(snapshot._multi_use) def test_ctor_w_multiple_options(self): timestamp = self._makeTimestamp() @@ -346,6 +431,7 @@ def test_ctor_w_read_timestamp(self): self.assertIsNone(snapshot._min_read_timestamp) self.assertIsNone(snapshot._max_staleness) self.assertIsNone(snapshot._exact_staleness) + self.assertFalse(snapshot._multi_use) def test_ctor_w_min_read_timestamp(self): timestamp = self._makeTimestamp() @@ -357,6 +443,7 @@ def test_ctor_w_min_read_timestamp(self): self.assertEqual(snapshot._min_read_timestamp, timestamp) self.assertIsNone(snapshot._max_staleness) self.assertIsNone(snapshot._exact_staleness) + self.assertFalse(snapshot._multi_use) def test_ctor_w_max_staleness(self): duration = self._makeDuration() @@ -368,6 +455,7 @@ def test_ctor_w_max_staleness(self): self.assertIsNone(snapshot._min_read_timestamp) self.assertEqual(snapshot._max_staleness, duration) self.assertIsNone(snapshot._exact_staleness) + self.assertFalse(snapshot._multi_use) def test_ctor_w_exact_staleness(self): duration = self._makeDuration() @@ -379,6 +467,66 @@ def test_ctor_w_exact_staleness(self): self.assertIsNone(snapshot._min_read_timestamp) self.assertIsNone(snapshot._max_staleness) self.assertEqual(snapshot._exact_staleness, duration) + self.assertFalse(snapshot._multi_use) + + def test_ctor_w_multi_use(self): + session = _Session() + snapshot = self._make_one(session, multi_use=True) + self.assertTrue(snapshot._session is session) + self.assertTrue(snapshot._strong) + self.assertIsNone(snapshot._read_timestamp) + self.assertIsNone(snapshot._min_read_timestamp) + self.assertIsNone(snapshot._max_staleness) + self.assertIsNone(snapshot._exact_staleness) + self.assertTrue(snapshot._multi_use) + + def test_ctor_w_multi_use_and_read_timestamp(self): + timestamp = self._makeTimestamp() + session = _Session() + snapshot = self._make_one( + session, read_timestamp=timestamp, multi_use=True) + self.assertTrue(snapshot._session is session) + self.assertFalse(snapshot._strong) + self.assertEqual(snapshot._read_timestamp, timestamp) + self.assertIsNone(snapshot._min_read_timestamp) + self.assertIsNone(snapshot._max_staleness) + self.assertIsNone(snapshot._exact_staleness) + self.assertTrue(snapshot._multi_use) + + def test_ctor_w_multi_use_and_min_read_timestamp(self): + timestamp = self._makeTimestamp() + session = _Session() + + with self.assertRaises(ValueError): + self._make_one( + session, min_read_timestamp=timestamp, multi_use=True) + + def test_ctor_w_multi_use_and_max_staleness(self): + duration = self._makeDuration() + session = _Session() + + with self.assertRaises(ValueError): + self._make_one(session, max_staleness=duration, multi_use=True) + + def test_ctor_w_multi_use_and_exact_staleness(self): + duration = self._makeDuration() + session = _Session() + snapshot = self._make_one( + session, exact_staleness=duration, multi_use=True) + self.assertTrue(snapshot._session is session) + self.assertFalse(snapshot._strong) + self.assertIsNone(snapshot._read_timestamp) + self.assertIsNone(snapshot._min_read_timestamp) + self.assertIsNone(snapshot._max_staleness) + self.assertEqual(snapshot._exact_staleness, duration) + self.assertTrue(snapshot._multi_use) + + def test__make_txn_selector_w_transaction_id(self): + session = _Session() + 
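+ # Illustrative note: once _transaction_id is set, the helper must
+ # emit TransactionSelector(id=...) so later reads reuse the begun
+ # read-only transaction instead of a new 'single_use' / 'begin'
+ # selector.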
snapshot = self._make_one(session) + snapshot._transaction_id = self.TRANSACTION_ID + selector = snapshot._make_txn_selector() + self.assertEqual(selector.id, self.TRANSACTION_ID) def test__make_txn_selector_strong(self): session = _Session() @@ -429,6 +577,127 @@ def test__make_txn_selector_w_exact_staleness(self): self.assertEqual(options.read_only.exact_staleness.seconds, 3) self.assertEqual(options.read_only.exact_staleness.nanos, 123456000) + def test__make_txn_selector_strong_w_multi_use(self): + session = _Session() + snapshot = self._make_one(session, multi_use=True) + selector = snapshot._make_txn_selector() + options = selector.begin + self.assertTrue(options.read_only.strong) + + def test__make_txn_selector_w_read_timestamp_w_multi_use(self): + from google.cloud._helpers import _pb_timestamp_to_datetime + + timestamp = self._makeTimestamp() + session = _Session() + snapshot = self._make_one( + session, read_timestamp=timestamp, multi_use=True) + selector = snapshot._make_txn_selector() + options = selector.begin + self.assertEqual( + _pb_timestamp_to_datetime(options.read_only.read_timestamp), + timestamp) + + def test__make_txn_selector_w_exact_staleness_w_multi_use(self): + duration = self._makeDuration(seconds=3, microseconds=123456) + session = _Session() + snapshot = self._make_one( + session, exact_staleness=duration, multi_use=True) + selector = snapshot._make_txn_selector() + options = selector.begin + self.assertEqual(options.read_only.exact_staleness.seconds, 3) + self.assertEqual(options.read_only.exact_staleness.nanos, 123456000) + + def test_begin_wo_multi_use(self): + session = _Session() + snapshot = self._make_one(session) + with self.assertRaises(ValueError): + snapshot.begin() + + def test_begin_w_read_request_count_gt_0(self): + session = _Session() + snapshot = self._make_one(session, multi_use=True) + snapshot._read_request_count = 1 + with self.assertRaises(ValueError): + snapshot.begin() + + def test_begin_w_existing_txn_id(self): + session = _Session() + snapshot = self._make_one(session, multi_use=True) + snapshot._transaction_id = self.TRANSACTION_ID + with self.assertRaises(ValueError): + snapshot.begin() + + def test_begin_w_gax_error(self): + from google.gax.errors import GaxError + from google.cloud._helpers import _pb_timestamp_to_datetime + + database = _Database() + api = database.spanner_api = _FauxSpannerAPI( + _random_gax_error=True) + timestamp = self._makeTimestamp() + session = _Session(database) + snapshot = self._make_one( + session, read_timestamp=timestamp, multi_use=True) + + with self.assertRaises(GaxError): + snapshot.begin() + + session_id, txn_options, options = api._begun + self.assertEqual(session_id, session.name) + self.assertEqual( + _pb_timestamp_to_datetime(txn_options.read_only.read_timestamp), + timestamp) + self.assertEqual(options.kwargs['metadata'], + [('google-cloud-resource-prefix', database.name)]) + + def test_begin_ok_exact_staleness(self): + from google.cloud.proto.spanner.v1.transaction_pb2 import ( + Transaction as TransactionPB) + + transaction_pb = TransactionPB(id=self.TRANSACTION_ID) + database = _Database() + api = database.spanner_api = _FauxSpannerAPI( + _begin_transaction_response=transaction_pb) + duration = self._makeDuration(seconds=3, microseconds=123456) + session = _Session(database) + snapshot = self._make_one( + session, exact_staleness=duration, multi_use=True) + + txn_id = snapshot.begin() + + self.assertEqual(txn_id, self.TRANSACTION_ID) + self.assertEqual(snapshot._transaction_id, 
self.TRANSACTION_ID) + + session_id, txn_options, options = api._begun + self.assertEqual(session_id, session.name) + read_only = txn_options.read_only + self.assertEqual(read_only.exact_staleness.seconds, 3) + self.assertEqual(read_only.exact_staleness.nanos, 123456000) + self.assertEqual(options.kwargs['metadata'], + [('google-cloud-resource-prefix', database.name)]) + + def test_begin_ok_exact_strong(self): + from google.cloud.proto.spanner.v1.transaction_pb2 import ( + Transaction as TransactionPB) + + transaction_pb = TransactionPB(id=self.TRANSACTION_ID) + database = _Database() + api = database.spanner_api = _FauxSpannerAPI( + _begin_transaction_response=transaction_pb) + session = _Session(database) + snapshot = self._make_one(session, multi_use=True) + + txn_id = snapshot.begin() + + self.assertEqual(txn_id, self.TRANSACTION_ID) + self.assertEqual(snapshot._transaction_id, self.TRANSACTION_ID) + + session_id, txn_options, options = api._begun + self.assertEqual(session_id, session.name) + self.assertTrue(txn_options.read_only.strong) + self.assertEqual(options.kwargs['metadata'], + [('google-cloud-resource-prefix', database.name)]) + class _Session(object): @@ -443,7 +712,15 @@ class _Database(object): class _FauxSpannerAPI(_GAXBaseAPI): - _read_with = None + _read_with = _begin = None + + def begin_transaction(self, session, options_, options=None): + from google.gax.errors import GaxError + + self._begun = (session, options_, options) + if self._random_gax_error: + raise GaxError('error') + return self._begin_transaction_response # pylint: disable=too-many-arguments def streaming_read(self, session, table, columns, key_set, diff --git a/spanner/tests/unit/test_streamed.py b/spanner/tests/unit/test_streamed.py index edcace273f66..2e31f4dfad2c 100644 --- a/spanner/tests/unit/test_streamed.py +++ b/spanner/tests/unit/test_streamed.py @@ -15,6 +15,8 @@ import unittest +import mock + class TestStreamedResultSet(unittest.TestCase): @@ -30,6 +32,18 @@ def test_ctor_defaults(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) self.assertIs(streamed._response_iterator, iterator) + self.assertIsNone(streamed._source) + self.assertEqual(streamed.rows, []) + self.assertIsNone(streamed.metadata) + self.assertIsNone(streamed.stats) + self.assertIsNone(streamed.resume_token) + + def test_ctor_w_source(self): + iterator = _MockCancellableIterator() + source = object() + streamed = self._make_one(iterator, source=source) + self.assertIs(streamed._response_iterator, iterator) + self.assertIs(streamed._source, source) self.assertEqual(streamed.rows, []) self.assertIsNone(streamed.metadata) self.assertIsNone(streamed.stats) @@ -42,14 +56,14 @@ def test_fields_unset(self): _ = streamed.fields @staticmethod - def _makeScalarField(name, type_): + def _make_scalar_field(name, type_): from google.cloud.proto.spanner.v1.type_pb2 import StructType from google.cloud.proto.spanner.v1.type_pb2 import Type return StructType.Field(name=name, type=Type(code=type_)) @staticmethod - def _makeArrayField(name, element_type_code=None, element_type=None): + def _make_array_field(name, element_type_code=None, element_type=None): from google.cloud.proto.spanner.v1.type_pb2 import StructType from google.cloud.proto.spanner.v1.type_pb2 import Type @@ -60,7 +74,7 @@ def _makeArrayField(name, element_type_code=None, element_type=None): return StructType.Field(name=name, type=array_type) @staticmethod - def _makeStructType(struct_type_fields): + def _make_struct_type(struct_type_fields): from 
google.cloud.proto.spanner.v1.type_pb2 import StructType from google.cloud.proto.spanner.v1.type_pb2 import Type @@ -72,13 +86,13 @@ def _makeStructType(struct_type_fields): return Type(code='STRUCT', struct_type=struct_type) @staticmethod - def _makeValue(value): + def _make_value(value): from google.cloud.spanner._helpers import _make_value_pb return _make_value_pb(value) @staticmethod - def _makeListValue(values=(), value_pbs=None): + def _make_list_value(values=(), value_pbs=None): from google.protobuf.struct_pb2 import ListValue from google.protobuf.struct_pb2 import Value from google.cloud.spanner._helpers import _make_list_value_pb @@ -87,15 +101,52 @@ def _makeListValue(values=(), value_pbs=None): return Value(list_value=ListValue(values=value_pbs)) return Value(list_value=_make_list_value_pb(values)) + @staticmethod + def _make_result_set_metadata(fields=(), transaction_id=None): + from google.cloud.proto.spanner.v1.result_set_pb2 import ( + ResultSetMetadata) + metadata = ResultSetMetadata() + for field in fields: + metadata.row_type.fields.add().CopyFrom(field) + if transaction_id is not None: + metadata.transaction.id = transaction_id + return metadata + + @staticmethod + def _make_result_set_stats(query_plan=None, **kw): + from google.cloud.proto.spanner.v1.result_set_pb2 import ( + ResultSetStats) + from google.protobuf.struct_pb2 import Struct + from google.cloud.spanner._helpers import _make_value_pb + + query_stats = Struct(fields={ + key: _make_value_pb(value) for key, value in kw.items()}) + return ResultSetStats( + query_plan=query_plan, + query_stats=query_stats, + ) + + @staticmethod + def _make_partial_result_set( + values, metadata=None, stats=None, chunked_value=False): + from google.cloud.proto.spanner.v1.result_set_pb2 import ( + PartialResultSet) + return PartialResultSet( + values=values, + metadata=metadata, + stats=stats, + chunked_value=chunked_value, + ) + def test_properties_set(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), ] - metadata = streamed._metadata = _ResultSetMetadataPB(FIELDS) - stats = streamed._stats = _ResultSetStatsPB() + metadata = streamed._metadata = self._make_result_set_metadata(FIELDS) + stats = streamed._stats = self._make_result_set_stats() self.assertEqual(list(streamed.fields), FIELDS) self.assertIs(streamed.metadata, metadata) self.assertIs(streamed.stats, stats) @@ -106,11 +157,11 @@ def test__merge_chunk_bool(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('registered_voter', 'BOOL'), + self._make_scalar_field('registered_voter', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(True) - chunk = self._makeValue(False) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(True) + chunk = self._make_value(False) with self.assertRaises(Unmergeable): streamed._merge_chunk(chunk) @@ -119,11 +170,11 @@ def test__merge_chunk_int64(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('age', 'INT64'), + self._make_scalar_field('age', 'INT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(42) - chunk = self._makeValue(13) + 
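+ # INT64 values travel as decimal strings in a PartialResultSet, so a
+ # chunked 42 / 13 pair is merged by string concatenation into '4213'.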
streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(42) + chunk = self._make_value(13) merged = streamed._merge_chunk(chunk) self.assertEqual(merged.string_value, '4213') @@ -133,11 +184,11 @@ def test__merge_chunk_float64_nan_string(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('weight', 'FLOAT64'), + self._make_scalar_field('weight', 'FLOAT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(u'Na') - chunk = self._makeValue(u'N') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(u'Na') + chunk = self._make_value(u'N') merged = streamed._merge_chunk(chunk) self.assertEqual(merged.string_value, u'NaN') @@ -146,11 +197,11 @@ def test__merge_chunk_float64_w_empty(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('weight', 'FLOAT64'), + self._make_scalar_field('weight', 'FLOAT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(3.14159) - chunk = self._makeValue('') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(3.14159) + chunk = self._make_value('') merged = streamed._merge_chunk(chunk) self.assertEqual(merged.number_value, 3.14159) @@ -161,11 +212,11 @@ def test__merge_chunk_float64_w_float64(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('weight', 'FLOAT64'), + self._make_scalar_field('weight', 'FLOAT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(3.14159) - chunk = self._makeValue(2.71828) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(3.14159) + chunk = self._make_value(2.71828) with self.assertRaises(Unmergeable): streamed._merge_chunk(chunk) @@ -174,11 +225,11 @@ def test__merge_chunk_string(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('name', 'STRING'), + self._make_scalar_field('name', 'STRING'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(u'phred') - chunk = self._makeValue(u'wylma') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(u'phred') + chunk = self._make_value(u'wylma') merged = streamed._merge_chunk(chunk) @@ -189,11 +240,11 @@ def test__merge_chunk_string_w_bytes(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('image', 'BYTES'), + self._make_scalar_field('image', 'BYTES'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(u'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAAAAAA6fptVAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA\n') - chunk = self._makeValue(u'B3RJTUUH4QQGFwsBTL3HMwAAABJpVFh0Q29tbWVudAAAAAAAU0FNUExFMG3E+AAAAApJREFUCNdj\nYAAAAAIAAeIhvDMAAAAASUVORK5CYII=\n') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(u'iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAAAAAA6fptVAAAACXBIWXMAAAsTAAALEwEAmpwYAAAA\n') + chunk = self._make_value(u'B3RJTUUH4QQGFwsBTL3HMwAAABJpVFh0Q29tbWVudAAAAAAAU0FNUExFMG3E+AAAAApJREFUCNdj\nYAAAAAIAAeIhvDMAAAAASUVORK5CYII=\n') merged = 
streamed._merge_chunk(chunk) @@ -204,15 +255,15 @@ def test__merge_chunk_array_of_bool(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='BOOL'), + self._make_array_field('name', element_type_code='BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([True, True]) - chunk = self._makeListValue([False, False, False]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([True, True]) + chunk = self._make_list_value([False, False, False]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([True, True, False, False, False]) + expected = self._make_list_value([True, True, False, False, False]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -220,15 +271,15 @@ def test__merge_chunk_array_of_int(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='INT64'), + self._make_array_field('name', element_type_code='INT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([0, 1, 2]) - chunk = self._makeListValue([3, 4, 5]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([0, 1, 2]) + chunk = self._make_list_value([3, 4, 5]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([0, 1, 23, 4, 5]) + expected = self._make_list_value([0, 1, 23, 4, 5]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -242,15 +293,15 @@ def test__merge_chunk_array_of_float(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='FLOAT64'), + self._make_array_field('name', element_type_code='FLOAT64'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([PI, SQRT_2]) - chunk = self._makeListValue(['', EULER, LOG_10]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([PI, SQRT_2]) + chunk = self._make_list_value(['', EULER, LOG_10]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([PI, SQRT_2, EULER, LOG_10]) + expected = self._make_list_value([PI, SQRT_2, EULER, LOG_10]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -258,15 +309,15 @@ def test__merge_chunk_array_of_string(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='STRING'), + self._make_array_field('name', element_type_code='STRING'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([u'A', u'B', u'C']) - chunk = self._makeListValue([None, u'D', u'E']) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([u'A', u'B', u'C']) + chunk = self._make_list_value([None, u'D', u'E']) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([u'A', u'B', u'C', None, u'D', u'E']) + expected = self._make_list_value([u'A', u'B', u'C', None, u'D', u'E']) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -274,15 +325,15 @@ def test__merge_chunk_array_of_string_with_null(self): iterator = 
_MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeArrayField('name', element_type_code='STRING'), + self._make_array_field('name', element_type_code='STRING'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue([u'A', u'B', u'C']) - chunk = self._makeListValue([u'D', u'E']) + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value([u'A', u'B', u'C']) + chunk = self._make_list_value([u'D', u'E']) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue([u'A', u'B', u'CD', u'E']) + expected = self._make_list_value([u'A', u'B', u'CD', u'E']) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -298,22 +349,22 @@ def test__merge_chunk_array_of_array_of_int(self): FIELDS = [ StructType.Field(name='loloi', type=array_type) ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue(value_pbs=[ - self._makeListValue([0, 1]), - self._makeListValue([2]), + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value(value_pbs=[ + self._make_list_value([0, 1]), + self._make_list_value([2]), ]) - chunk = self._makeListValue(value_pbs=[ - self._makeListValue([3]), - self._makeListValue([4, 5]), + chunk = self._make_list_value(value_pbs=[ + self._make_list_value([3]), + self._make_list_value([4, 5]), ]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue(value_pbs=[ - self._makeListValue([0, 1]), - self._makeListValue([23]), - self._makeListValue([4, 5]), + expected = self._make_list_value(value_pbs=[ + self._make_list_value([0, 1]), + self._make_list_value([23]), + self._make_list_value([4, 5]), ]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -330,22 +381,22 @@ def test__merge_chunk_array_of_array_of_string(self): FIELDS = [ StructType.Field(name='lolos', type=array_type) ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeListValue(value_pbs=[ - self._makeListValue([u'A', u'B']), - self._makeListValue([u'C']), + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_list_value(value_pbs=[ + self._make_list_value([u'A', u'B']), + self._make_list_value([u'C']), ]) - chunk = self._makeListValue(value_pbs=[ - self._makeListValue([u'D']), - self._makeListValue([u'E', u'F']), + chunk = self._make_list_value(value_pbs=[ + self._make_list_value([u'D']), + self._make_list_value([u'E', u'F']), ]) merged = streamed._merge_chunk(chunk) - expected = self._makeListValue(value_pbs=[ - self._makeListValue([u'A', u'B']), - self._makeListValue([u'CD']), - self._makeListValue([u'E', u'F']), + expected = self._make_list_value(value_pbs=[ + self._make_list_value([u'A', u'B']), + self._make_list_value([u'CD']), + self._make_list_value([u'E', u'F']), ]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -353,47 +404,47 @@ def test__merge_chunk_array_of_array_of_string(self): def test__merge_chunk_array_of_struct(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) - struct_type = self._makeStructType([ + struct_type = self._make_struct_type([ ('name', 'STRING'), ('age', 'INT64'), ]) FIELDS = [ - self._makeArrayField('test', element_type=struct_type), + self._make_array_field('test', element_type=struct_type), ] - streamed._metadata = 
_ResultSetMetadataPB(FIELDS) - partial = self._makeListValue([u'Phred ']) - streamed._pending_chunk = self._makeListValue(value_pbs=[partial]) - rest = self._makeListValue([u'Phlyntstone', 31]) - chunk = self._makeListValue(value_pbs=[rest]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + partial = self._make_list_value([u'Phred ']) + streamed._pending_chunk = self._make_list_value(value_pbs=[partial]) + rest = self._make_list_value([u'Phlyntstone', 31]) + chunk = self._make_list_value(value_pbs=[rest]) merged = streamed._merge_chunk(chunk) - struct = self._makeListValue([u'Phred Phlyntstone', 31]) - expected = self._makeListValue(value_pbs=[struct]) + struct = self._make_list_value([u'Phred Phlyntstone', 31]) + expected = self._make_list_value(value_pbs=[struct]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) def test__merge_chunk_array_of_struct_unmergeable(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) - struct_type = self._makeStructType([ + struct_type = self._make_struct_type([ ('name', 'STRING'), ('registered', 'BOOL'), ('voted', 'BOOL'), ]) FIELDS = [ - self._makeArrayField('test', element_type=struct_type), + self._make_array_field('test', element_type=struct_type), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) - partial = self._makeListValue([u'Phred Phlyntstone', True]) - streamed._pending_chunk = self._makeListValue(value_pbs=[partial]) - rest = self._makeListValue([True]) - chunk = self._makeListValue(value_pbs=[rest]) + streamed._metadata = self._make_result_set_metadata(FIELDS) + partial = self._make_list_value([u'Phred Phlyntstone', True]) + streamed._pending_chunk = self._make_list_value(value_pbs=[partial]) + rest = self._make_list_value([True]) + chunk = self._make_list_value(value_pbs=[rest]) merged = streamed._merge_chunk(chunk) - struct = self._makeListValue([u'Phred Phlyntstone', True, True]) - expected = self._makeListValue(value_pbs=[struct]) + struct = self._make_list_value([u'Phred Phlyntstone', True, True]) + expected = self._make_list_value(value_pbs=[struct]) self.assertEqual(merged, expected) self.assertIsNone(streamed._pending_chunk) @@ -401,11 +452,11 @@ def test_merge_values_empty_and_empty(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) streamed._current_row = [] streamed._merge_values([]) self.assertEqual(streamed.rows, []) @@ -415,13 +466,13 @@ def test_merge_values_empty_and_partial(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BARE = [u'Phred Phlyntstone', 42] - VALUES = [self._makeValue(bare) for bare in BARE] + VALUES = [self._make_value(bare) for bare in BARE] streamed._current_row = [] 
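# Only two of the three fields are supplied, so no row is completed:
# the values accumulate in _current_row until the 'married' value
# arrives.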
streamed._merge_values(VALUES) self.assertEqual(streamed.rows, []) @@ -431,13 +482,13 @@ def test_merge_values_empty_and_filled(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BARE = [u'Phred Phlyntstone', 42, True] - VALUES = [self._makeValue(bare) for bare in BARE] + VALUES = [self._make_value(bare) for bare in BARE] streamed._current_row = [] streamed._merge_values(VALUES) self.assertEqual(streamed.rows, [BARE]) @@ -447,17 +498,17 @@ def test_merge_values_empty_and_filled_plus(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BARE = [ u'Phred Phlyntstone', 42, True, u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', ] - VALUES = [self._makeValue(bare) for bare in BARE] + VALUES = [self._make_value(bare) for bare in BARE] streamed._current_row = [] streamed._merge_values(VALUES) self.assertEqual(streamed.rows, [BARE[0:3], BARE[3:6]]) @@ -467,11 +518,11 @@ def test_merge_values_partial_and_empty(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BEFORE = [ u'Phred Phlyntstone' ] @@ -484,15 +535,15 @@ def test_merge_values_partial_and_partial(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BEFORE = [u'Phred Phlyntstone'] streamed._current_row[:] = BEFORE MERGED = [42] - TO_MERGE = [self._makeValue(item) for item in MERGED] + TO_MERGE = [self._make_value(item) for item in MERGED] streamed._merge_values(TO_MERGE) self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BEFORE + MERGED) @@ -501,17 +552,17 @@ def test_merge_values_partial_and_filled(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + 
self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BEFORE = [ u'Phred Phlyntstone' ] streamed._current_row[:] = BEFORE MERGED = [42, True] - TO_MERGE = [self._makeValue(item) for item in MERGED] + TO_MERGE = [self._make_value(item) for item in MERGED] streamed._merge_values(TO_MERGE) self.assertEqual(streamed.rows, [BEFORE + MERGED]) self.assertEqual(streamed._current_row, []) @@ -520,13 +571,13 @@ def test_merge_values_partial_and_filled_plus(self): iterator = _MockCancellableIterator() streamed = self._make_one(iterator) FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) BEFORE = [ - self._makeValue(u'Phred Phlyntstone') + self._make_value(u'Phred Phlyntstone') ] streamed._current_row[:] = BEFORE MERGED = [ @@ -534,7 +585,7 @@ def test_merge_values_partial_and_filled_plus(self): u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', ] - TO_MERGE = [self._makeValue(item) for item in MERGED] + TO_MERGE = [self._make_value(item) for item in MERGED] VALUES = BEFORE + MERGED streamed._merge_values(TO_MERGE) self.assertEqual(streamed.rows, [VALUES[0:3], VALUES[3:6]]) @@ -547,36 +598,62 @@ def test_consume_next_empty(self): streamed.consume_next() def test_consume_next_first_set_partial(self): + TXN_ID = b'DEADBEEF' FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata( + FIELDS, transaction_id=TXN_ID) BARE = [u'Phred Phlyntstone', 42] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES, metadata=metadata) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, metadata=metadata) iterator = _MockCancellableIterator(result_set) - streamed = self._make_one(iterator) + source = mock.Mock(_transaction_id=None, spec=['_transaction_id']) + streamed = self._make_one(iterator, source=source) streamed.consume_next() self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertIs(streamed.metadata, metadata) + self.assertEqual(streamed.metadata, metadata) + self.assertEqual(streamed.resume_token, result_set.resume_token) + self.assertEqual(source._transaction_id, TXN_ID) + + def test_consume_next_first_set_partial_existing_txn_id(self): + TXN_ID = b'DEADBEEF' + FIELDS = [ + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), + ] + metadata = self._make_result_set_metadata( + FIELDS, transaction_id=b'') + BARE = [u'Phred Phlyntstone', 42] + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, metadata=metadata) + iterator = _MockCancellableIterator(result_set) + source = mock.Mock(_transaction_id=TXN_ID, spec=['_transaction_id']) + streamed = self._make_one(iterator, source=source) + 
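+ # consume_next() copies metadata.transaction.id into the source only
+ # when the source has no transaction ID yet, so the preexisting
+ # b'DEADBEEF' must survive this response's empty transaction ID.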
streamed.consume_next() + self.assertEqual(streamed.rows, []) + self.assertEqual(streamed._current_row, BARE) + self.assertEqual(streamed.metadata, metadata) self.assertEqual(streamed.resume_token, result_set.resume_token) + self.assertEqual(source._transaction_id, TXN_ID) def test_consume_next_w_partial_result(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] VALUES = [ - self._makeValue(u'Phred '), + self._make_value(u'Phred '), ] - result_set = _PartialResultSetPB(VALUES, chunked_value=True) + result_set = self._make_partial_result_set(VALUES, chunked_value=True) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) - streamed._metadata = _ResultSetMetadataPB(FIELDS) + streamed._metadata = self._make_result_set_metadata(FIELDS) streamed.consume_next() self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, []) @@ -585,21 +662,21 @@ def test_consume_next_w_partial_result(self): def test_consume_next_w_pending_chunk(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] BARE = [ u'Phlyntstone', 42, True, u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', ] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) - streamed._metadata = _ResultSetMetadataPB(FIELDS) - streamed._pending_chunk = self._makeValue(u'Phred ') + streamed._metadata = self._make_result_set_metadata(FIELDS) + streamed._pending_chunk = self._make_value(u'Phred ') streamed.consume_next() self.assertEqual(streamed.rows, [ [u'Phred Phlyntstone', BARE[1], BARE[2]], @@ -611,26 +688,26 @@ def test_consume_next_w_pending_chunk(self): def test_consume_next_last_set(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) - stats = _ResultSetStatsPB( + metadata = self._make_result_set_metadata(FIELDS) + stats = self._make_result_set_stats( rows_returned="1", elapsed_time="1.23 secs", - cpu_tme="0.98 secs", + cpu_time="0.98 secs", ) BARE = [u'Phred Phlyntstone', 42, True] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES, stats=stats) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, stats=stats) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) streamed._metadata = metadata streamed.consume_next() self.assertEqual(streamed.rows, [BARE]) self.assertEqual(streamed._current_row, []) - self.assertIs(streamed._stats, stats) + self.assertEqual(streamed._stats, stats) self.assertEqual(streamed.resume_token, result_set.resume_token) def test_consume_all_empty(self): @@ -640,36 +717,37 @@ 
def test_consume_all_empty(self): def test_consume_all_one_result_set_partial(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) BARE = [u'Phred Phlyntstone', 42] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES, metadata=metadata) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, metadata=metadata) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) streamed.consume_all() self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertIs(streamed.metadata, metadata) + self.assertEqual(streamed.metadata, metadata) def test_consume_all_multiple_result_sets_filled(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) BARE = [ u'Phred Phlyntstone', 42, True, u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', 41, True, ] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set1 = _PartialResultSetPB(VALUES[:4], metadata=metadata) - result_set2 = _PartialResultSetPB(VALUES[4:]) + VALUES = [self._make_value(bare) for bare in BARE] + result_set1 = self._make_partial_result_set( + VALUES[:4], metadata=metadata) + result_set2 = self._make_partial_result_set(VALUES[4:]) iterator = _MockCancellableIterator(result_set1, result_set2) streamed = self._make_one(iterator) streamed.consume_all() @@ -689,37 +767,38 @@ def test___iter___empty(self): def test___iter___one_result_set_partial(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) BARE = [u'Phred Phlyntstone', 42] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set = _PartialResultSetPB(VALUES, metadata=metadata) + VALUES = [self._make_value(bare) for bare in BARE] + result_set = self._make_partial_result_set(VALUES, metadata=metadata) iterator = _MockCancellableIterator(result_set) streamed = self._make_one(iterator) found = list(streamed) self.assertEqual(found, []) self.assertEqual(streamed.rows, []) self.assertEqual(streamed._current_row, BARE) - self.assertIs(streamed.metadata, metadata) + self.assertEqual(streamed.metadata, metadata) def test___iter___multiple_result_sets_filled(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) BARE = [ u'Phred 
Phlyntstone', 42, True, u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', 41, True, ] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set1 = _PartialResultSetPB(VALUES[:4], metadata=metadata) - result_set2 = _PartialResultSetPB(VALUES[4:]) + VALUES = [self._make_value(bare) for bare in BARE] + result_set1 = self._make_partial_result_set( + VALUES[:4], metadata=metadata) + result_set2 = self._make_partial_result_set(VALUES[4:]) iterator = _MockCancellableIterator(result_set1, result_set2) streamed = self._make_one(iterator) found = list(streamed) @@ -734,11 +813,11 @@ def test___iter___multiple_result_sets_filled(self): def test___iter___w_existing_rows_read(self): FIELDS = [ - self._makeScalarField('full_name', 'STRING'), - self._makeScalarField('age', 'INT64'), - self._makeScalarField('married', 'BOOL'), + self._make_scalar_field('full_name', 'STRING'), + self._make_scalar_field('age', 'INT64'), + self._make_scalar_field('married', 'BOOL'), ] - metadata = _ResultSetMetadataPB(FIELDS) + metadata = self._make_result_set_metadata(FIELDS) ALREADY = [ [u'Pebbylz Phlyntstone', 4, False], [u'Dino Rhubble', 4, False], @@ -748,9 +827,10 @@ def test___iter___w_existing_rows_read(self): u'Bharney Rhubble', 39, True, u'Wylma Phlyntstone', 41, True, ] - VALUES = [self._makeValue(bare) for bare in BARE] - result_set1 = _PartialResultSetPB(VALUES[:4], metadata=metadata) - result_set2 = _PartialResultSetPB(VALUES[4:]) + VALUES = [self._make_value(bare) for bare in BARE] + result_set1 = self._make_partial_result_set( + VALUES[:4], metadata=metadata) + result_set2 = self._make_partial_result_set(VALUES[4:]) iterator = _MockCancellableIterator(result_set1, result_set2) streamed = self._make_one(iterator) streamed._rows[:] = ALREADY @@ -779,40 +859,6 @@ def __next__(self): # pragma: NO COVER Py3k return self.next() -class _ResultSetMetadataPB(object): - - def __init__(self, fields): - from google.cloud.proto.spanner.v1.type_pb2 import StructType - - self.row_type = StructType(fields=fields) - - -class _ResultSetStatsPB(object): - - def __init__(self, query_plan=None, **query_stats): - from google.protobuf.struct_pb2 import Struct - from google.cloud.spanner._helpers import _make_value_pb - - self.query_plan = query_plan - self.query_stats = Struct(fields={ - key: _make_value_pb(value) for key, value in query_stats.items()}) - - -class _PartialResultSetPB(object): - - resume_token = b'DEADBEEF' - - def __init__(self, values, metadata=None, stats=None, chunked_value=False): - self.values = values - self.metadata = metadata - self.stats = stats - self.chunked_value = chunked_value - - def HasField(self, name): - assert name == 'stats' - return self.stats is not None - - class TestStreamedResultSet_JSON_acceptance_tests(unittest.TestCase): _json_tests = None diff --git a/spanner/tests/unit/test_transaction.py b/spanner/tests/unit/test_transaction.py index 973aeedb179d..9bb36d1f5435 100644 --- a/spanner/tests/unit/test_transaction.py +++ b/spanner/tests/unit/test_transaction.py @@ -51,9 +51,10 @@ def test_ctor_defaults(self): session = _Session() transaction = self._make_one(session) self.assertIs(transaction._session, session) - self.assertIsNone(transaction._id) + self.assertIsNone(transaction._transaction_id) self.assertIsNone(transaction.committed) - self.assertEqual(transaction._rolled_back, False) + self.assertFalse(transaction._rolled_back) + self.assertTrue(transaction._multi_use) def test__check_state_not_begun(self): session = _Session() @@ -64,7 +65,7 @@ def 
test__check_state_not_begun(self): def test__check_state_already_committed(self): session = _Session() transaction = self._make_one(session) - transaction._id = b'DEADBEEF' + transaction._transaction_id = self.TRANSACTION_ID transaction.committed = object() with self.assertRaises(ValueError): transaction._check_state() @@ -72,7 +73,7 @@ def test__check_state_already_committed(self): def test__check_state_already_rolled_back(self): session = _Session() transaction = self._make_one(session) - transaction._id = b'DEADBEEF' + transaction._transaction_id = self.TRANSACTION_ID transaction._rolled_back = True with self.assertRaises(ValueError): transaction._check_state() @@ -80,20 +81,20 @@ def test__check_state_already_rolled_back(self): def test__check_state_ok(self): session = _Session() transaction = self._make_one(session) - transaction._id = b'DEADBEEF' + transaction._transaction_id = self.TRANSACTION_ID transaction._check_state() # does not raise def test__make_txn_selector(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID selector = transaction._make_txn_selector() self.assertEqual(selector.id, self.TRANSACTION_ID) def test_begin_already_begun(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID with self.assertRaises(ValueError): transaction.begin() @@ -143,7 +144,7 @@ def test_begin_ok(self): txn_id = transaction.begin() self.assertEqual(txn_id, self.TRANSACTION_ID) - self.assertEqual(transaction._id, self.TRANSACTION_ID) + self.assertEqual(transaction._transaction_id, self.TRANSACTION_ID) session_id, txn_options, options = api._begun self.assertEqual(session_id, session.name) @@ -160,7 +161,7 @@ def test_rollback_not_begun(self): def test_rollback_already_committed(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.committed = object() with self.assertRaises(ValueError): transaction.rollback() @@ -168,7 +169,7 @@ def test_rollback_already_committed(self): def test_rollback_already_rolled_back(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction._rolled_back = True with self.assertRaises(ValueError): transaction.rollback() @@ -181,7 +182,7 @@ def test_rollback_w_gax_error(self): _random_gax_error=True) session = _Session(database) transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.insert(TABLE_NAME, COLUMNS, VALUES) with self.assertRaises(GaxError): @@ -204,7 +205,7 @@ def test_rollback_ok(self): _rollback_response=empty_pb) session = _Session(database) transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.replace(TABLE_NAME, COLUMNS, VALUES) transaction.rollback() @@ -227,7 +228,7 @@ def test_commit_not_begun(self): def test_commit_already_committed(self): session = _Session() transaction = self._make_one(session) - transaction._id = self.TRANSACTION_ID + transaction._transaction_id = self.TRANSACTION_ID transaction.committed = object() with self.assertRaises(ValueError): transaction.commit() @@ -235,7 +236,7 @@ def test_commit_already_committed(self): def 
         session = _Session()
         transaction = self._make_one(session)
-        transaction._id = self.TRANSACTION_ID
+        transaction._transaction_id = self.TRANSACTION_ID
         transaction._rolled_back = True
         with self.assertRaises(ValueError):
             transaction.commit()
@@ -243,7 +244,7 @@ def test_commit_no_mutations(self):
         session = _Session()
         transaction = self._make_one(session)
-        transaction._id = self.TRANSACTION_ID
+        transaction._transaction_id = self.TRANSACTION_ID
         with self.assertRaises(ValueError):
             transaction.commit()
@@ -255,7 +256,7 @@ def test_commit_w_gax_error(self):
             _random_gax_error=True)
         session = _Session(database)
         transaction = self._make_one(session)
-        transaction._id = self.TRANSACTION_ID
+        transaction._transaction_id = self.TRANSACTION_ID
         transaction.replace(TABLE_NAME, COLUMNS, VALUES)
 
         with self.assertRaises(GaxError):
@@ -287,7 +288,7 @@ def test_commit_ok(self):
             _commit_response=response)
         session = _Session(database)
         transaction = self._make_one(session)
-        transaction._id = self.TRANSACTION_ID
+        transaction._transaction_id = self.TRANSACTION_ID
         transaction.delete(TABLE_NAME, keyset)
 
         transaction.commit()

From 4b1f2ee433b0b2931d59a7c079a567201132b9cb Mon Sep 17 00:00:00 2001
From: Cal Peyser <calpeyser@gmail.com>
Date: Mon, 17 Jul 2017 11:37:34 -0400
Subject: [PATCH 61/62] Adding RPC retries to Bigtable.

---
 bigtable/google/cloud/bigtable/retry.py    | 169 +++++++++++++++++++
 bigtable/google/cloud/bigtable/row_data.py |   3 +
 bigtable/google/cloud/bigtable/table.py    | 101 ++++-------
 bigtable/tests/retry_test_script.txt       |  38 +++++
 bigtable/tests/system.py                   |  78 +++++++++
 bigtable/tests/unit/_testing.py            |  27 ++-
 bigtable/tests/unit/test_table.py          | 185 +++++++++++++++++++--
 7 files changed, 520 insertions(+), 81 deletions(-)
 create mode 100644 bigtable/google/cloud/bigtable/retry.py
 create mode 100644 bigtable/tests/retry_test_script.txt

diff --git a/bigtable/google/cloud/bigtable/retry.py b/bigtable/google/cloud/bigtable/retry.py
new file mode 100644
index 000000000000..f20419ce4f8e
--- /dev/null
+++ b/bigtable/google/cloud/bigtable/retry.py
@@ -0,0 +1,169 @@
+"""Provides function wrappers that implement retrying."""
+import random
+import time
+import six
+import sys
+
+from google.cloud._helpers import _to_bytes
+from google.cloud.bigtable._generated import (
+    bigtable_pb2 as data_messages_v2_pb2)
+from google.gax import config, errors
+from grpc import RpcError
+
+
+_MILLIS_PER_SECOND = 1000
+
+
+class ReadRowsIterator(object):
+    """An iterator over a 'ReadRows' response stream that transparently
+    resets the stream and retries when certain (idempotent) RPC errors
+    are raised.
+    """
+
+    def __init__(self, client, name, start_key, end_key, filter_, limit,
+                 retry_options, **kwargs):
+        self.client = client
+        self.retry_options = retry_options
+        self.name = name
+        self.start_key = start_key
+        self.start_key_closed = True
+        self.end_key = end_key
+        self.filter_ = filter_
+        self.limit = limit
+        self.delay_mult = retry_options.backoff_settings.retry_delay_multiplier
+        self.max_delay_millis = \
+            retry_options.backoff_settings.max_retry_delay_millis
+        self.timeout_mult = \
+            retry_options.backoff_settings.rpc_timeout_multiplier
+        self.max_timeout = \
+            (retry_options.backoff_settings.max_rpc_timeout_millis /
+             _MILLIS_PER_SECOND)
+        self.total_timeout = \
+            (retry_options.backoff_settings.total_timeout_millis /
+             _MILLIS_PER_SECOND)
+        self.set_stream()
+
+    def set_start_key(self, start_key):
+        """
+        Sets the row key at which this iterator will begin reading.
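+
+        Rows up to and including this key have already been delivered, so
+        a retried request resumes with an open interval
+        (``start_key_open``) and no row is returned a second time.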
+ """ + self.start_key = start_key + self.start_key_closed = False + + def set_stream(self): + """ + Resets the read stream by making an RPC on the 'ReadRows' endpoint. + """ + req_pb = _create_row_request(self.name, start_key=self.start_key, + start_key_closed=self.start_key_closed, + end_key=self.end_key, + filter_=self.filter_, limit=self.limit) + self.stream = self.client._data_stub.ReadRows(req_pb) + + def next(self, *args, **kwargs): + """ + Read and return the next row from the stream. + Retry on idempotent failure. + """ + delay = self.retry_options.backoff_settings.initial_retry_delay_millis + exc = errors.RetryError('Retry total timeout exceeded before any' + 'response was received') + timeout = (self.retry_options.backoff_settings + .initial_rpc_timeout_millis / + _MILLIS_PER_SECOND) + + now = time.time() + deadline = now + self.total_timeout + while deadline is None or now < deadline: + try: + return six.next(self.stream) + except StopIteration as stop: + raise stop + except RpcError as error: # pylint: disable=broad-except + code = config.exc_to_code(error) + if code not in self.retry_options.retry_codes: + six.reraise(type(error), error) + + # pylint: disable=redefined-variable-type + exc = errors.RetryError( + 'Retry total timeout exceeded with exception', error) + + # Sleep a random number which will, on average, equal the + # expected delay. + to_sleep = random.uniform(0, delay * 2) + time.sleep(to_sleep / _MILLIS_PER_SECOND) + delay = min(delay * self.delay_mult, self.max_delay_millis) + now = time.time() + timeout = min( + timeout * self.timeout_mult, self.max_timeout, + deadline - now) + self.set_stream() + + six.reraise(errors.RetryError, exc, sys.exc_info()[2]) + + def __next__(self, *args, **kwargs): + return self.next(*args, **kwargs) + + +def _create_row_request(table_name, row_key=None, start_key=None, + start_key_closed=True, end_key=None, filter_=None, + limit=None): + """Creates a request to read rows in a table. + + :type table_name: str + :param table_name: The name of the table to read from. + + :type row_key: bytes + :param row_key: (Optional) The key of a specific row to read from. + + :type start_key: bytes + :param start_key: (Optional) The beginning of a range of row keys to + read from. The range will include ``start_key``. If + left empty, will be interpreted as the empty string. + + :type end_key: bytes + :param end_key: (Optional) The end of a range of row keys to read from. + The range will not include ``end_key``. If left empty, + will be interpreted as an infinite string. + + :type filter_: :class:`.RowFilter` + :param filter_: (Optional) The filter to apply to the contents of the + specified row(s). If unset, reads the entire table. + + :type limit: int + :param limit: (Optional) The read will terminate after committing to N + rows' worth of results. The default (zero) is to return + all results. + + :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest` + :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs. 
+    :raises: :class:`ValueError <exceptions.ValueError>` if both
+        ``row_key`` and one of ``start_key`` and ``end_key`` are set
+    """
+    request_kwargs = {'table_name': table_name}
+    if (row_key is not None and
+            (start_key is not None or end_key is not None)):
+        raise ValueError('Row key and row range cannot be '
+                         'set simultaneously')
+    range_kwargs = {}
+    if start_key is not None or end_key is not None:
+        if start_key is not None:
+            if start_key_closed:
+                range_kwargs['start_key_closed'] = _to_bytes(start_key)
+            else:
+                range_kwargs['start_key_open'] = _to_bytes(start_key)
+        if end_key is not None:
+            range_kwargs['end_key_open'] = _to_bytes(end_key)
+    if filter_ is not None:
+        request_kwargs['filter'] = filter_.to_pb()
+    if limit is not None:
+        request_kwargs['rows_limit'] = limit
+
+    message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs)
+
+    if row_key is not None:
+        message.rows.row_keys.append(_to_bytes(row_key))
+
+    if range_kwargs:
+        message.rows.row_ranges.add(**range_kwargs)
+
+    return message
diff --git a/bigtable/google/cloud/bigtable/row_data.py b/bigtable/google/cloud/bigtable/row_data.py
index 78179db25c4e..0849e681b7e6 100644
--- a/bigtable/google/cloud/bigtable/row_data.py
+++ b/bigtable/google/cloud/bigtable/row_data.py
@@ -274,6 +274,9 @@ def consume_next(self):
 
             self._validate_chunk(chunk)
 
+            if hasattr(self._response_iterator, 'set_start_key'):
+                self._response_iterator.set_start_key(chunk.row_key)
+
             if chunk.reset_row:
                 row = self._row = None
                 cell = self._cell = self._previous_cell = None
diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py
index 40ef3a2ca2fb..ad6fab88dcf9 100644
--- a/bigtable/google/cloud/bigtable/table.py
+++ b/bigtable/google/cloud/bigtable/table.py
@@ -17,7 +17,6 @@
 
 import six
 
-from google.cloud._helpers import _to_bytes
 from google.cloud.bigtable._generated import (
     bigtable_pb2 as data_messages_v2_pb2)
 from google.cloud.bigtable._generated import (
@@ -30,6 +29,26 @@
 from google.cloud.bigtable.row import ConditionalRow
 from google.cloud.bigtable.row import DirectRow
 from google.cloud.bigtable.row_data import PartialRowsData
+from google.gax import RetryOptions, BackoffSettings
+from google.cloud.bigtable.retry import ReadRowsIterator, _create_row_request
+from grpc import StatusCode
+
+BACKOFF_SETTINGS = BackoffSettings(
+    initial_retry_delay_millis=10,
+    retry_delay_multiplier=1.3,
+    max_retry_delay_millis=30000,
+    initial_rpc_timeout_millis=25 * 60 * 1000,
+    rpc_timeout_multiplier=1.0,
+    max_rpc_timeout_millis=25 * 60 * 1000,
+    total_timeout_millis=30 * 60 * 1000
+)
+
+RETRY_CODES = [
+    StatusCode.DEADLINE_EXCEEDED,
+    StatusCode.ABORTED,
+    StatusCode.INTERNAL,
+    StatusCode.UNAVAILABLE
+]
 
 
 # Maximum number of mutations in bulk (MutateRowsRequest message):
@@ -257,7 +276,7 @@ def read_row(self, row_key, filter_=None):
         return rows_data.rows[row_key]
 
     def read_rows(self, start_key=None, end_key=None, limit=None,
-                  filter_=None):
+                  filter_=None, backoff_settings=None):
         """Read rows from this table.
 
         :type start_key: bytes
@@ -284,13 +303,18 @@ def read_rows(self, start_key=None, end_key=None, limit=None,
         :returns: A :class:`.PartialRowsData` convenience wrapper for
                   consuming the streamed results.
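+
+        :type backoff_settings: :class:`google.gax.BackoffSettings`
+        :param backoff_settings: (Optional) Delay and timeout settings for
+                                 retrying the read; if unset, the
+                                 module-level ``BACKOFF_SETTINGS`` defaults
+                                 above are used.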
""" - request_pb = _create_row_request( - self.name, start_key=start_key, end_key=end_key, filter_=filter_, - limit=limit) client = self._instance._client - response_iterator = client._data_stub.ReadRows(request_pb) - # We expect an iterator of `data_messages_v2_pb2.ReadRowsResponse` - return PartialRowsData(response_iterator) + if backoff_settings is None: + backoff_settings = BACKOFF_SETTINGS + RETRY_OPTIONS = RetryOptions( + retry_codes=RETRY_CODES, + backoff_settings=backoff_settings + ) + + retrying_iterator = ReadRowsIterator(client, self.name, start_key, + end_key, filter_, limit, + RETRY_OPTIONS) + return PartialRowsData(retrying_iterator) def mutate_rows(self, rows): """Mutates multiple rows in bulk. @@ -359,67 +383,6 @@ def sample_row_keys(self): return response_iterator -def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, - filter_=None, limit=None): - """Creates a request to read rows in a table. - - :type table_name: str - :param table_name: The name of the table to read from. - - :type row_key: bytes - :param row_key: (Optional) The key of a specific row to read from. - - :type start_key: bytes - :param start_key: (Optional) The beginning of a range of row keys to - read from. The range will include ``start_key``. If - left empty, will be interpreted as the empty string. - - :type end_key: bytes - :param end_key: (Optional) The end of a range of row keys to read from. - The range will not include ``end_key``. If left empty, - will be interpreted as an infinite string. - - :type filter_: :class:`.RowFilter` - :param filter_: (Optional) The filter to apply to the contents of the - specified row(s). If unset, reads the entire table. - - :type limit: int - :param limit: (Optional) The read will terminate after committing to N - rows' worth of results. The default (zero) is to return - all results. - - :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest` - :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs. - :raises: :class:`ValueError <exceptions.ValueError>` if both - ``row_key`` and one of ``start_key`` and ``end_key`` are set - """ - request_kwargs = {'table_name': table_name} - if (row_key is not None and - (start_key is not None or end_key is not None)): - raise ValueError('Row key and row range cannot be ' - 'set simultaneously') - range_kwargs = {} - if start_key is not None or end_key is not None: - if start_key is not None: - range_kwargs['start_key_closed'] = _to_bytes(start_key) - if end_key is not None: - range_kwargs['end_key_open'] = _to_bytes(end_key) - if filter_ is not None: - request_kwargs['filter'] = filter_.to_pb() - if limit is not None: - request_kwargs['rows_limit'] = limit - - message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs) - - if row_key is not None: - message.rows.row_keys.append(_to_bytes(row_key)) - - if range_kwargs: - message.rows.row_ranges.add(**range_kwargs) - - return message - - def _mutate_rows_request(table_name, rows): """Creates a request to mutate rows in a table. diff --git a/bigtable/tests/retry_test_script.txt b/bigtable/tests/retry_test_script.txt new file mode 100644 index 000000000000..863662e897ba --- /dev/null +++ b/bigtable/tests/retry_test_script.txt @@ -0,0 +1,38 @@ +# This retry script is processed by the retry server and the client under test. +# Client tests should parse any command beginning with "CLIENT:", send the corresponding RPC +# to the retry server and expect a valid response. 
+# "EXPECT" commands indicate the call the server is expecting the client to send. +# +# The retry server has one table named "table" that should be used for testing. +# There are three types of commands supported: +# READ <comma-separated list of row ids to read> +# Expect the corresponding rows to be returned with arbitrary values. +# SCAN <range>... <comma separated list of row ids to expect> +# Ranges are expressed as an interval with either open or closed start and end, +# such as [1,3) for "1,2" or (1, 3] for "2,3". +# WRITE <comma-separated list of row ids to write> +# All writes should succeed eventually. Value payload is ignored. +# The server writes PASS or FAIL on a line by itself to STDOUT depending on the result of the test. +# All other server output should be ignored. + +# Echo same scan back after immediate error +CLIENT: SCAN [r1,r3) r1,r2 +EXPECT: SCAN [r1,r3) +SERVER: ERROR Unavailable +EXPECT: SCAN [r1,r3) +SERVER: READ_RESPONSE r1,r2 + +# Retry scans with open interval starting at the least read row key. +# Instead of using open intervals for retry ranges, '\x00' can be +# appended to the last received row key and sent in a closed interval. +CLIENT: SCAN [r1,r9) r1,r2,r3,r4,r5,r6,r7,r8 +EXPECT: SCAN [r1,r9) +SERVER: READ_RESPONSE r1,r2,r3,r4 +SERVER: ERROR Unavailable +EXPECT: SCAN (r4,r9) +SERVER: ERROR Unavailable +EXPECT: SCAN (r4,r9) +SERVER: READ_RESPONSE r5,r6,r7 +SERVER: ERROR Unavailable +EXPECT: SCAN (r7,r9) +SERVER: READ_RESPONSE r8 diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index cfc2cb17f805..f236138441f1 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -272,6 +272,84 @@ def test_delete_column_family(self): # Make sure we have successfully deleted it. self.assertEqual(temp_table.list_column_families(), {}) + def test_retry(self): + import subprocess, os, stat, platform + from google.cloud.bigtable.client import Client + from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable.table import Table + + # import for urlopen based on version + try: + # python 3 + from urllib.request import urlopen + except ImportError: + # python 2 + from urllib2 import urlopen + + + TEST_SCRIPT = 'tests/retry_test_script.txt' + SERVER_NAME = 'retry_server' + SERVER_ZIP = SERVER_NAME + ".tar.gz" + + def process_scan(table, range, ids): + range_chunks = range.split(",") + range_open = range_chunks[0].lstrip("[") + range_close = range_chunks[1].rstrip(")") + rows = table.read_rows(range_open, range_close) + rows.consume_all() + + # Download server + MOCK_SERVER_URLS = { + 'Linux': 'https://storage.googleapis.com/cloud-bigtable-test/retries/retry_server_linux.tar.gz', + 'Darwin': 'https://storage.googleapis.com/cloud-bigtable-test/retries/retry_server_mac.tar.gz', + } + + test_platform = platform.system() + if test_platform not in MOCK_SERVER_URLS: + self.skip('Retry server not available for platform {0}.'.format(test_platform)) + + mock_server_download = urlopen(MOCK_SERVER_URLS[test_platform]).read() + mock_server_file = open(SERVER_ZIP, 'wb') + mock_server_file.write(mock_server_download) + + # Unzip server + subprocess.call(['tar', 'zxvf', SERVER_ZIP, '-C', '.']) + + # Connect to server + server = subprocess.Popen( + ['./' + SERVER_NAME, '--script=' + TEST_SCRIPT], + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + + (endpoint, port) = server.stdout.readline().rstrip("\n").split(":") + os.environ["BIGTABLE_EMULATOR_HOST"] = endpoint + ":" + port + client = Client(project="client", 
+        instance = Instance("instance", client)
+        table = instance.table("table")
+
+        # Run test, line by line
+        with open(TEST_SCRIPT, 'r') as script:
+            for line in script.readlines():
+                if line.startswith("CLIENT:"):
+                    chunks = line.split(" ")
+                    op = chunks[1]
+                    process_scan(table, chunks[2], chunks[3])
+
+        # Check that the test passed
+        server.kill()
+        server_stdout_lines = []
+        while True:
+            line = server.stdout.readline()
+            if line != '':
+                server_stdout_lines.append(line)
+            else:
+                break
+        self.assertEqual(server_stdout_lines[-1], "PASS\n")
+
+        # Clean up
+        os.remove(SERVER_ZIP)
+        os.remove(SERVER_NAME)
 
 class TestDataAPI(unittest.TestCase):
diff --git a/bigtable/tests/unit/_testing.py b/bigtable/tests/unit/_testing.py
index e67af6a1498c..7587c66c133b 100644
--- a/bigtable/tests/unit/_testing.py
+++ b/bigtable/tests/unit/_testing.py
@@ -14,7 +14,6 @@
 
 """Mocks used to emulate gRPC generated objects."""
 
-
 class _FakeStub(object):
     """Acts as a gPRC stub."""
 
@@ -27,6 +26,16 @@ def __getattr__(self, name):
         # since __getattribute__ will handle them.
         return _MethodMock(name, self)
 
+class _CustomFakeStub(object):
+    """Acts as a gRPC stub. Generates a result using an injected callable."""
+    def __init__(self, result_callable):
+        self.result_callable = result_callable
+        self.method_calls = []
+
+    def __getattr__(self, name):
+        # We need not worry about attributes set in constructor
+        # since __getattribute__ will handle them.
+        return _CustomMethodMock(name, self)
 
 class _MethodMock(object):
     """Mock for API method attached to a gRPC stub.
@@ -42,5 +51,19 @@ def __call__(self, *args, **kwargs):
         """Sync method meant to mock a gRPC stub request."""
         self._stub.method_calls.append((self._name, args, kwargs))
         curr_result, self._stub.results = (self._stub.results[0],
-                self._stub.results[1:])
+                                           self._stub.results[1:])
         return curr_result
+
+class _CustomMethodMock(object):
+    """
+    Same as _MethodMock, but backed by an injected callable.
+    """
+
+    def __init__(self, name, stub):
+        self._name = name
+        self._stub = stub
+
+    def __call__(self, *args, **kwargs):
+        """Sync method meant to mock a gRPC stub request."""
+        self._stub.method_calls.append((self._name, args, kwargs))
+        return self._stub.result_callable()
diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py
index dc4d2b5bbad0..c59667d6a821 100644
--- a/bigtable/tests/unit/test_table.py
+++ b/bigtable/tests/unit/test_table.py
@@ -493,7 +493,8 @@ def test_read_rows(self):
         from google.cloud._testing import _Monkey
         from tests.unit._testing import _FakeStub
         from google.cloud.bigtable.row_data import PartialRowsData
-        from google.cloud.bigtable import table as MUT
+        from google.cloud.bigtable import retry as MUT
+        from google.cloud.bigtable.retry import ReadRowsIterator
 
         client = _Client()
         instance = _Instance(self.INSTANCE_NAME, client=client)
@@ -513,20 +514,18 @@ def mock_create_row_request(table_name, **kwargs):
         # Patch the stub used by the API method.
         client._data_stub = stub = _FakeStub(response_iterator)
 
-        # Create expected_result.
-        expected_result = PartialRowsData(response_iterator)
-
-        # Perform the method and check the result.
         start_key = b'start-key'
         end_key = b'end-key'
         filter_obj = object()
         limit = 22
         with _Monkey(MUT, _create_row_request=mock_create_row_request):
+            # Perform the method and check the result.
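+            # read_rows now wraps the stream in a ReadRowsIterator, so the
+            # result is verified through its _response_iterator below
+            # rather than by direct equality with a PartialRowsData.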
             result = table.read_rows(
                 start_key=start_key, end_key=end_key, filter_=filter_obj,
                 limit=limit)
 
-        self.assertEqual(result, expected_result)
+        self.assertIsInstance(result._response_iterator, ReadRowsIterator)
+        self.assertEqual(result._response_iterator.client, client)
         self.assertEqual(stub.method_calls, [(
             'ReadRows',
             (request_pb,),
             {},
         )])
         created_kwargs = {
             'start_key': start_key,
             'end_key': end_key,
             'filter_': filter_obj,
             'limit': limit,
+            'start_key_closed': True,
         }
         self.assertEqual(mock_created, [(table.name, created_kwargs)])
 
+    def test_read_rows_one_chunk(self):
+        from google.cloud._testing import _Monkey
+        from tests.unit._testing import _FakeStub
+        from google.cloud.bigtable import retry as MUT
+        from google.cloud.bigtable.retry import ReadRowsIterator
+        from google.cloud.bigtable.row_data import Cell
+        from google.cloud.bigtable.row_data import PartialRowsData
+
+        client = _Client()
+        instance = _Instance(self.INSTANCE_NAME, client=client)
+        table = self._make_one(self.TABLE_ID, instance)
+
+        # Create request_pb
+        request_pb = object()  # Returned by our mock.
+        mock_created = []
+
+        def mock_create_row_request(table_name, **kwargs):
+            mock_created.append((table_name, kwargs))
+            return request_pb
+
+        # Create response_iterator
+        chunk = _ReadRowsResponseCellChunkPB(
+            row_key=self.ROW_KEY,
+            family_name=self.FAMILY_NAME,
+            qualifier=self.QUALIFIER,
+            timestamp_micros=self.TIMESTAMP_MICROS,
+            value=self.VALUE,
+            commit_row=True,
+        )
+        response_pb = _ReadRowsResponsePB(chunks=[chunk])
+        response_iterator = iter([response_pb])
+
+        # Patch the stub used by the API method.
+        client._data_stub = stub = _FakeStub(response_iterator)
+
+        start_key = b'start-key'
+        end_key = b'end-key'
+        filter_obj = object()
+        limit = 22
+        with _Monkey(MUT, _create_row_request=mock_create_row_request):
+            # Perform the method and check the result.
+            result = table.read_rows(
+                start_key=start_key, end_key=end_key, filter_=filter_obj,
+                limit=limit)
+            result.consume_all()
+
+    def test_read_rows_retry_timeout(self):
+        from google.cloud._testing import _Monkey
+        from tests.unit._testing import _CustomFakeStub
+        from google.cloud.bigtable.row_data import PartialRowsData
+        from google.cloud.bigtable import retry as MUT
+        from google.cloud.bigtable.retry import ReadRowsIterator
+        from google.gax import BackoffSettings
+        from google.gax.errors import RetryError
+        from grpc import StatusCode, RpcError
+        import time
+
+        client = _Client()
+        instance = _Instance(self.INSTANCE_NAME, client=client)
+        table = self._make_one(self.TABLE_ID, instance)
+
+        # Create request_pb
+        request_pb = object()  # Returned by our mock.
+        mock_created = []
+
+        def mock_create_row_request(table_name, **kwargs):
+            mock_created.append((table_name, kwargs))
+            return request_pb
+
+        # Create a slow response iterator to cause a timeout
+        class MockTimeoutError(RpcError):
+            def code(self):
+                return StatusCode.DEADLINE_EXCEEDED
+
+        def _wait_then_raise():
+            time.sleep(0.1)
+            raise MockTimeoutError()
+
+        # Patch the stub used by the API method. The stub should create a
+        # new slow iterator every time it is queried.
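+        # Each next() call sleeps 0.1s and raises DEADLINE_EXCEEDED, which
+        # is a retryable code, so the iterator keeps resetting the stream
+        # until total_timeout_millis (1000 below) is exhausted and a
+        # RetryError surfaces.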
+        def make_slow_iterator():
+            return (_wait_then_raise() for i in range(10))
+        client._data_stub = stub = _CustomFakeStub(make_slow_iterator)
+
+        # Set to timeout before RPC completes
+        test_backoff_settings = BackoffSettings(
+            initial_retry_delay_millis=10,
+            retry_delay_multiplier=0.3,
+            max_retry_delay_millis=30000,
+            initial_rpc_timeout_millis=1000,
+            rpc_timeout_multiplier=1.0,
+            max_rpc_timeout_millis=25 * 60 * 1000,
+            total_timeout_millis=1000
+        )
+
+        start_key = b'start-key'
+        end_key = b'end-key'
+        filter_obj = object()
+        limit = 22
+        with _Monkey(MUT, _create_row_request=mock_create_row_request):
+            # Verify that a RetryError is thrown on read.
+            result = table.read_rows(
+                start_key=start_key, end_key=end_key, filter_=filter_obj,
+                limit=limit, backoff_settings=test_backoff_settings)
+            with self.assertRaises(RetryError):
+                result.consume_next()
+
+    def test_read_rows_non_idempotent_error_throws(self):
+        from google.cloud._testing import _Monkey
+        from tests.unit._testing import _CustomFakeStub
+        from google.cloud.bigtable.row_data import PartialRowsData
+        from google.cloud.bigtable import retry as MUT
+        from google.cloud.bigtable.retry import ReadRowsIterator
+        from google.gax import BackoffSettings
+        from google.gax.errors import RetryError
+        from grpc import StatusCode, RpcError
+        import time
+
+        client = _Client()
+        instance = _Instance(self.INSTANCE_NAME, client=client)
+        table = self._make_one(self.TABLE_ID, instance)
+
+        # Create request_pb
+        request_pb = object()  # Returned by our mock.
+        mock_created = []
+
+        def mock_create_row_request(table_name, **kwargs):
+            mock_created.append((table_name, kwargs))
+            return request_pb
+
+        # Create response iterator that raises a non-idempotent exception
+        class MockNonIdempotentError(RpcError):
+            def code(self):
+                return StatusCode.RESOURCE_EXHAUSTED
+
+        def _raise():
+            raise MockNonIdempotentError()
+
+        # Patch the stub used by the API method. The stub should create a
+        # new raising iterator every time it is queried.
+        def make_raising_iterator():
+            return (_raise() for i in range(10))
+        client._data_stub = stub = _CustomFakeStub(make_raising_iterator)
+
+        start_key = b'start-key'
+        end_key = b'end-key'
+        filter_obj = object()
+        limit = 22
+        with _Monkey(MUT, _create_row_request=mock_create_row_request):
+            # Verify that the non-retryable error propagates on read.
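+            # (RESOURCE_EXHAUSTED is not among the retryable codes, so the
+            # iterator re-raises it after the first attempt instead of
+            # retrying.)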
+            result = table.read_rows(
+                start_key=start_key, end_key=end_key, filter_=filter_obj,
+                limit=limit)
+            with self.assertRaises(MockNonIdempotentError):
+                result.consume_next()
+
     def test_sample_row_keys(self):
         from tests.unit._testing import _FakeStub
 
@@ -572,12 +728,12 @@
 
 class Test__create_row_request(unittest.TestCase):
 
     def _call_fut(self, table_name, row_key=None, start_key=None, end_key=None,
-                  filter_=None, limit=None):
-        from google.cloud.bigtable.table import _create_row_request
+                  start_key_closed=True, filter_=None, limit=None):
+        from google.cloud.bigtable.retry import _create_row_request
 
         return _create_row_request(
             table_name, row_key=row_key, start_key=start_key, end_key=end_key,
-            filter_=filter_, limit=limit)
+            start_key_closed=start_key_closed, filter_=filter_, limit=limit)
 
     def test_table_name_only(self):
         table_name = 'table_name'
@@ -600,7 +756,7 @@ def test_row_key(self):
         expected_result.rows.row_keys.append(row_key)
         self.assertEqual(result, expected_result)
 
-    def test_row_range_start_key(self):
+    def test_row_range_start_key_closed(self):
         table_name = 'table_name'
         start_key = b'start_key'
         result = self._call_fut(table_name, start_key=start_key)
@@ -608,6 +764,15 @@ def test_row_range_start_key(self):
         expected_result.rows.row_ranges.add(start_key_closed=start_key)
         self.assertEqual(result, expected_result)
 
+    def test_row_range_start_key_open(self):
+        table_name = 'table_name'
+        start_key = b'start_key'
+        result = self._call_fut(table_name, start_key=start_key,
+                                start_key_closed=False)
+        expected_result = _ReadRowsRequestPB(table_name=table_name)
+        expected_result.rows.row_ranges.add(start_key_open=start_key)
+        self.assertEqual(result, expected_result)
+
     def test_row_range_end_key(self):
         table_name = 'table_name'
         end_key = b'end_key'

From 675220b47a06b0f2126ec28bd27be566ffbdfdbb Mon Sep 17 00:00:00 2001
From: calpeyser <calpeyser@gmail.com>
Date: Mon, 14 Aug 2017 10:47:06 -0700
Subject: [PATCH 62/62] decode server output in bigtable system test for
 python3 compatibility

---
 bigtable/tests/system.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py
index f236138441f1..3c5bffcfa337 100644
--- a/bigtable/tests/system.py
+++ b/bigtable/tests/system.py
@@ -322,7 +322,7 @@ def process_scan(table, range, ids):
             stderr=subprocess.PIPE,
         )
 
-        (endpoint, port) = server.stdout.readline().rstrip("\n").split(":")
+        (endpoint, port) = server.stdout.readline().decode("utf-8").rstrip("\n").split(":")
         os.environ["BIGTABLE_EMULATOR_HOST"] = endpoint + ":" + port
         client = Client(project="client", admin=True)
         instance = Instance("instance", client)
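
Usage sketch for the retry behavior added in PATCH 61 (illustrative only:
the project, instance, and table names are placeholders, and these
BackoffSettings values are examples, not recommendations):

    from google.gax import BackoffSettings
    from google.cloud.bigtable.client import Client

    client = Client(project='my-project', admin=True)
    table = client.instance('my-instance').table('my-table')

    # Allow up to five minutes overall for the scan; transient errors
    # (DEADLINE_EXCEEDED, ABORTED, INTERNAL, UNAVAILABLE) are retried with
    # exponential backoff, resuming after the last row received.
    backoff = BackoffSettings(
        initial_retry_delay_millis=10,
        retry_delay_multiplier=1.3,
        max_retry_delay_millis=30000,
        initial_rpc_timeout_millis=25 * 60 * 1000,
        rpc_timeout_multiplier=1.0,
        max_rpc_timeout_millis=25 * 60 * 1000,
        total_timeout_millis=5 * 60 * 1000,
    )

    rows = table.read_rows(start_key=b'a', end_key=b'z',
                           backoff_settings=backoff)
    rows.consume_all()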