diff --git a/sdk/tables/azure-data-tables/CHANGELOG.md b/sdk/tables/azure-data-tables/CHANGELOG.md
index eaa624d292d4..8af93197f430 100644
--- a/sdk/tables/azure-data-tables/CHANGELOG.md
+++ b/sdk/tables/azure-data-tables/CHANGELOG.md
@@ -15,6 +15,7 @@
 * Removed unused legacy client-side encryption attributes from client classes.
 * Fixed sharing of pipeline between service/table clients.
 * Added support for Azurite storage emulator
+* Added `RequestTooLargeError`, raised when a batch request fails with HTTP status 413 (Request Entity Too Large)
 
 ## 12.0.0b6 (2021-04-06)
 * Updated deserialization of datetime fields in entities to support preservation of the service format with additional decimal place.
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/__init__.py b/sdk/tables/azure-data-tables/azure/data/tables/__init__.py
index 1735163dcb4f..5065c15cb49c 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/__init__.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/__init__.py
@@ -6,6 +6,7 @@
 from azure.data.tables._models import TableServiceStats
 
 from ._entity import TableEntity, EntityProperty, EdmType
+from ._error import RequestTooLargeError
 from ._table_shared_access_signature import generate_table_sas, generate_account_sas
 from ._table_client import TableClient
 from ._table_service_client import TableServiceClient
@@ -54,4 +55,5 @@
     "SASProtocol",
     "TableBatchOperations",
     "BatchErrorException",
+    "RequestTooLargeError",
 ]
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py b/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py
index 137dcbb91e0b..459872d79ce5 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/_base_client.py
@@ -41,6 +41,7 @@
     STORAGE_OAUTH_SCOPE,
     SERVICE_HOST_BASE,
 )
+from ._error import RequestTooLargeError
 from ._models import LocationMode
 from ._authentication import SharedKeyCredentialPolicy
 from ._policies import (
@@ -306,6 +307,10 @@ def _batch_send(
             raise ResourceNotFoundError(
                 message="The resource could not be found", response=response
             )
+        if response.status_code == 413:
+            raise RequestTooLargeError(
+                message="The request was too large", response=response
+            )
         if response.status_code != 202:
             raise BatchErrorException(
                 message="There is a failure in the batch operation.",
@@ -319,6 +324,10 @@
                     raise ResourceNotFoundError(
                         message="The resource could not be found", response=response
                     )
+                if any(p for p in parts if p.status_code == 413):
+                    raise RequestTooLargeError(
+                        message="The request was too large", response=response
+                    )
 
                 raise BatchErrorException(
                     message="There is a failure in the batch operation.",
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_error.py b/sdk/tables/azure-data-tables/azure/data/tables/_error.py
index 45dd0dae2e56..bb33d2789a0b 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/_error.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/_error.py
@@ -141,6 +141,10 @@ def _process_table_error(storage_error):
     raise error
 
 
+class RequestTooLargeError(HttpResponseError):
+    """An error response with status code 413 - Request Entity Too Large"""
+
+
 class TableErrorCode(str, Enum):
     # Generic storage values
     account_already_exists = "AccountAlreadyExists"
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/_models.py b/sdk/tables/azure-data-tables/azure/data/tables/_models.py
index 6eeb406a9a61..9b4197c3da4e 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/_models.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/_models.py
@@ -523,19 +523,19 @@ class SASProtocol(str, Enum):
     HTTP = "http"
 
 
-class PartialBatchErrorException(HttpResponseError):
-    """There is a partial failure in batch operations.
-
-    :param str message: The message of the exception.
-    :param response: Server response to be deserialized.
-    :param list parts: A list of the parts in multipart response.
-    """
-
-    def __init__(self, message, response, parts):
-        self.parts = parts
-        super(PartialBatchErrorException, self).__init__(
-            message=message, response=response
-        )
+# class PartialBatchErrorException(HttpResponseError):
+#     """There is a partial failure in batch operations.
+
+#     :param str message: The message of the exception.
+#     :param response: Server response to be deserialized.
+#     :param list parts: A list of the parts in multipart response.
+#     """
+
+#     def __init__(self, message, response, parts):
+#         self.parts = parts
+#         super(PartialBatchErrorException, self).__init__(
+#             message=message, response=response
+#         )
 
 
 class BatchErrorException(HttpResponseError):
diff --git a/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py b/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py
index 70eed353733e..8774c1502824 100644
--- a/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py
+++ b/sdk/tables/azure-data-tables/azure/data/tables/aio/_base_client_async.py
@@ -31,6 +31,7 @@
 from .._base_client import AccountHostsMixin, get_api_version, extract_batch_part_metadata
 from .._authentication import SharedKeyCredentialPolicy
 from .._constants import STORAGE_OAUTH_SCOPE
+from .._error import RequestTooLargeError
 from .._models import BatchErrorException
 from .._policies import StorageHosts, StorageHeadersPolicy
 from .._sdk_moniker import SDK_MONIKER
@@ -141,6 +142,10 @@ async def _batch_send(
             raise ResourceNotFoundError(
                 message="The resource could not be found", response=response
             )
+        if response.status_code == 413:
+            raise RequestTooLargeError(
+                message="The request was too large", response=response
+            )
         if response.status_code != 202:
             raise BatchErrorException(
                 message="There is a failure in the batch operation.",
@@ -157,6 +162,11 @@
                     raise ResourceNotFoundError(
                         message="The resource could not be found", response=response
                     )
+                if any(p for p in parts if p.status_code == 413):
+                    raise RequestTooLargeError(
+                        message="The request was too large", response=response
+                    )
+
 
                 raise BatchErrorException(
                     message="There is a failure in the batch operation.",
diff --git a/sdk/tables/azure-data-tables/tests/test_table_batch.py b/sdk/tables/azure-data-tables/tests/test_table_batch.py
index 204135f319d2..45ebc6c3a35a 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_batch.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_batch.py
@@ -10,6 +10,7 @@
 
 from datetime import datetime, timedelta
 from dateutil.tz import tzutc
+import os
 import sys
 import uuid
 
@@ -20,7 +21,6 @@
 from azure.core.exceptions import (
     ResourceExistsError,
     ResourceNotFoundError,
-    HttpResponseError,
     ClientAuthenticationError
 )
 from azure.data.tables import (
@@ -34,7 +34,7 @@
     UpdateMode,
     generate_table_sas,
     TableSasPermissions,
-    TableClient
+    RequestTooLargeError
 )
 
 from _shared.testcase import TableTestCase
@@ -870,6 +870,31 @@ def test_batch_sas_auth(self, tables_storage_account_name, tables_primary_storag
         finally:
             self._tear_down()
 
+    @pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
+    @pytest.mark.live_test_only  # Request bodies are very large
+    @TablesPreparer()
+    def test_batch_request_too_large(self, tables_storage_account_name, tables_primary_storage_account_key):
+        # Arrange
+        self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
+        try:
+
+            batch = self.table.create_batch()
+            entity = {
+                'PartitionKey': 'pk001',
+                'Foo': os.urandom(1024*64),
+                'Bar': os.urandom(1024*64),
+                'Baz': os.urandom(1024*64)
+            }
+            for i in range(50):
+                entity['RowKey'] = str(i)
+                batch.create_entity(entity)
+
+            with pytest.raises(RequestTooLargeError):
+                self.table.send_batch(batch)
+
+        finally:
+            self._tear_down()
+
 
 class TestTableUnitTest(TableTestCase):
 
diff --git a/sdk/tables/azure-data-tables/tests/test_table_batch_async.py b/sdk/tables/azure-data-tables/tests/test_table_batch_async.py
index aad24c21c8a9..0efc79a955bd 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_batch_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_batch_async.py
@@ -8,10 +8,11 @@
 
 import pytest
 
-import uuid
 from datetime import datetime, timedelta
 from dateutil.tz import tzutc
+import os
 import sys
+import uuid
 
 from devtools_testutils import AzureTestCase
 
@@ -31,7 +32,8 @@
     EdmType,
     BatchErrorException,
     generate_table_sas,
-    TableSasPermissions
+    TableSasPermissions,
+    RequestTooLargeError
 )
 
 from _shared.asynctestcase import AsyncTableTestCase
@@ -760,4 +762,29 @@ async def test_batch_sas_auth(self, tables_storage_account_name, tables_primary_
         assert total_entities == transaction_count
 
         finally:
-            await self._tear_down()
\ No newline at end of file
+            await self._tear_down()
+
+    @pytest.mark.live_test_only  # Request bodies are very large
+    @TablesPreparer()
+    async def test_batch_request_too_large(self, tables_storage_account_name, tables_primary_storage_account_key):
+        # Arrange
+        await self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
+        try:
+
+            batch = self.table.create_batch()
+            entity = {
+                'PartitionKey': 'pk001',
+                'Foo': os.urandom(1024*64),
+                'Bar': os.urandom(1024*64),
+                'Baz': os.urandom(1024*64)
+            }
+            for i in range(50):
+                entity['RowKey'] = str(i)
+                batch.create_entity(entity)
+
+            with pytest.raises(RequestTooLargeError):
+                await self.table.send_batch(batch)
+
+        finally:
+            await self._tear_down()
+
diff --git a/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py b/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py
index 9fbb5cb5d3a6..607a6facbc64 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py
@@ -7,8 +7,8 @@
 # --------------------------------------------------------------------------
 from datetime import datetime
 from dateutil.tz import tzutc
+import os
 import sys
-from time import sleep
 import uuid
 
 import pytest
@@ -19,8 +19,6 @@
 from azure.core.exceptions import (
     ResourceExistsError,
     ResourceNotFoundError,
-    HttpResponseError,
-    ClientAuthenticationError
 )
 from azure.data.tables import (
     EdmType,
@@ -30,7 +28,8 @@
     BatchErrorException,
     TableServiceClient,
     TableEntity,
-    UpdateMode
+    UpdateMode,
+    RequestTooLargeError
 )
 
 from _shared.testcase import TableTestCase, SLEEP_DELAY
@@ -66,7 +66,7 @@ def _tear_down(self):
                     self.ts.delete_table(table_name)
             except:
                 pass
-        sleep(SLEEP_DELAY)
+        self.sleep(SLEEP_DELAY)
 
     #--Helpers-----------------------------------------------------------------
 
@@ -607,3 +607,28 @@ def test_new_delete_nonexistent_entity(self, tables_cosmos_account_name, tables_
 
         finally:
             self._tear_down()
+
+    @pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
+    @pytest.mark.live_test_only  # Request bodies are very large
+    @CosmosPreparer()
+    def test_batch_request_too_large(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
+        # Arrange
+        self._set_up(tables_cosmos_account_name, tables_primary_cosmos_account_key)
+        try:
+
+            batch = self.table.create_batch()
+            entity = {
+                'PartitionKey': 'pk001',
+                'Foo': os.urandom(1024*64),
+                'Bar': os.urandom(1024*64),
+                'Baz': os.urandom(1024*64)
+            }
+            for i in range(20):
+                entity['RowKey'] = str(i)
+                batch.create_entity(entity)
+
+            with pytest.raises(RequestTooLargeError):
+                self.table.send_batch(batch)
+
+        finally:
+            self._tear_down()
diff --git a/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos_async.py b/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos_async.py
index 39dfd38a3a42..b87e80cce992 100644
--- a/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos_async.py
+++ b/sdk/tables/azure-data-tables/tests/test_table_batch_cosmos_async.py
@@ -8,8 +8,8 @@
 
 from datetime import datetime
 from dateutil.tz import tzutc
+import os
 import sys
-from time import sleep
 import uuid
 
 import pytest
@@ -28,7 +28,8 @@
     UpdateMode,
     EntityProperty,
     EdmType,
-    BatchErrorException
+    BatchErrorException,
+    RequestTooLargeError
 )
 
 from azure.data.tables.aio import TableServiceClient
@@ -666,3 +667,28 @@ async def test_new_delete_nonexistent_entity(self, tables_cosmos_account_name, t
 
         finally:
             await self._tear_down()
+
+    @pytest.mark.live_test_only  # Request bodies are very large
+    @CosmosPreparer()
+    async def test_batch_request_too_large(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
+        # Arrange
+        await self._set_up(tables_cosmos_account_name, tables_primary_cosmos_account_key)
+        try:
+
+            batch = self.table.create_batch()
+            entity = {
+                'PartitionKey': 'pk001',
+                'Foo': os.urandom(1024*64),
+                'Bar': os.urandom(1024*64),
+                'Baz': os.urandom(1024*64)
+            }
+            for i in range(20):
+                entity['RowKey'] = str(i)
+                batch.create_entity(entity)
+
+            with pytest.raises(RequestTooLargeError):
+                await self.table.send_batch(batch)
+
+        finally:
+            await self._tear_down()
+
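
For reference, a minimal sketch of how a caller might handle the new exception, mirroring the `create_batch`/`send_batch` API exercised in the tests above. The connection-string environment variable and table name are placeholders, not part of this change:

```python
import os

from azure.data.tables import TableClient, RequestTooLargeError

# Placeholder connection details -- substitute your own account and table.
client = TableClient.from_connection_string(
    conn_str=os.environ["AZURE_TABLES_CONNECTION_STRING"], table_name="batchdemo"
)

batch = client.create_batch()
entity = {
    'PartitionKey': 'pk001',
    'Foo': os.urandom(1024 * 64),  # three 64 KiB properties per entity,
    'Bar': os.urandom(1024 * 64),  # so 50 entities (~9.6 MiB total) exceed
    'Baz': os.urandom(1024 * 64),  # the service's 4 MiB batch payload limit
}
for i in range(50):
    entity['RowKey'] = str(i)
    batch.create_entity(entity)

try:
    client.send_batch(batch)
except RequestTooLargeError:
    # The service rejected the payload with HTTP 413; callers can now detect
    # the oversized batch specifically and, e.g., resubmit in smaller chunks.
    print("Batch exceeded the maximum request size; retry with smaller batches.")
```

Since `RequestTooLargeError` subclasses `HttpResponseError`, existing handlers that catch the broader type continue to work unchanged.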