1 change: 1 addition & 0 deletions sdk/tables/azure-data-tables/CHANGELOG.md
@@ -15,6 +15,7 @@
* Removed unused legacy client-side encryption attributes from client classes.
* Fixed sharing of pipeline between service/table clients.
* Added support for Azurite storage emulator
* Added `RequestTooLargeError`, raised when a batch request returns a 413 (Request Entity Too Large) status code

## 12.0.0b6 (2021-04-06)
* Updated deserialization of datetime fields in entities to support preservation of the service format with additional decimal place.
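A minimal caller-side sketch of the new behaviour (the `table` client and entity contents here are hypothetical; the batch surface mirrors the tests further down):

```python
import os

from azure.data.tables import RequestTooLargeError

batch = table.create_batch()  # `table` is an existing TableClient
for i in range(50):
    batch.create_entity({
        'PartitionKey': 'pk001',
        'RowKey': str(i),
        'Payload': os.urandom(1024 * 64),  # deliberately oversized
    })

try:
    table.send_batch(batch)
except RequestTooLargeError as error:
    print(error.status_code)  # 413, inherited from HttpResponseError
```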
2 changes: 2 additions & 0 deletions sdk/tables/azure-data-tables/azure/data/tables/__init__.py
@@ -6,6 +6,7 @@
from azure.data.tables._models import TableServiceStats

from ._entity import TableEntity, EntityProperty, EdmType
from ._error import RequestTooLargeError
from ._table_shared_access_signature import generate_table_sas, generate_account_sas
from ._table_client import TableClient
from ._table_service_client import TableServiceClient
@@ -54,4 +55,5 @@
"SASProtocol",
"TableBatchOperations",
"BatchErrorException",
"RequestTooLargeError",
]
@@ -41,6 +41,7 @@
STORAGE_OAUTH_SCOPE,
SERVICE_HOST_BASE,
)
from ._error import RequestTooLargeError
from ._models import LocationMode
from ._authentication import SharedKeyCredentialPolicy
from ._policies import (
@@ -306,6 +307,10 @@ def _batch_send(
raise ResourceNotFoundError(
message="The resource could not be found", response=response
)
if response.status_code == 413:
raise RequestTooLargeError(
message="The request was too large", response=response
)
if response.status_code != 202:
raise BatchErrorException(
message="There is a failure in the batch operation.",
@@ -319,6 +324,10 @@
raise ResourceNotFoundError(
message="The resource could not be found", response=response
)
if any(p.status_code == 413 for p in parts):
raise RequestTooLargeError(
message="The request was too large", response=response
)

raise BatchErrorException(
message="There is a failure in the batch operation.",
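Note the two 413 paths the hunk above covers: the service can refuse the whole batch outright, or accept the multipart request (202) while an individual sub-response carries the 413; both now surface as the same exception type. That makes a caller-side split-and-retry strategy straightforward. A hypothetical sketch (the helper below is illustrative, not part of this PR), with the caveat that splitting gives up the transaction's all-or-nothing semantics:

```python
from azure.data.tables import RequestTooLargeError

def send_batch_with_split(table, entities):
    """Send entities as one batch; on 413, halve the set and retry each part."""
    batch = table.create_batch()
    for entity in entities:
        batch.create_entity(entity)
    try:
        table.send_batch(batch)
    except RequestTooLargeError:
        if len(entities) <= 1:
            raise  # a single oversized entity cannot be split any further
        mid = len(entities) // 2
        send_batch_with_split(table, entities[:mid])
        send_batch_with_split(table, entities[mid:])
```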
4 changes: 4 additions & 0 deletions sdk/tables/azure-data-tables/azure/data/tables/_error.py
@@ -141,6 +141,10 @@ def _process_table_error(storage_error):
raise error


class RequestTooLargeError(HttpResponseError):
"""An error response with status code 413 - Request Entity Too Large"""


class TableErrorCode(str, Enum):
# Generic storage values
account_already_exists = "AccountAlreadyExists"
26 changes: 13 additions & 13 deletions sdk/tables/azure-data-tables/azure/data/tables/_models.py
@@ -523,19 +523,19 @@ class SASProtocol(str, Enum):
HTTP = "http"


class PartialBatchErrorException(HttpResponseError):
"""There is a partial failure in batch operations.

:param str message: The message of the exception.
:param response: Server response to be deserialized.
:param list parts: A list of the parts in multipart response.
"""

def __init__(self, message, response, parts):
self.parts = parts
super(PartialBatchErrorException, self).__init__(
message=message, response=response
)
# class PartialBatchErrorException(HttpResponseError):
# """There is a partial failure in batch operations.

# :param str message: The message of the exception.
# :param response: Server response to be deserialized.
# :param list parts: A list of the parts in multipart response.
# """

# def __init__(self, message, response, parts):
# self.parts = parts
# super(PartialBatchErrorException, self).__init__(
# message=message, response=response
# )


class BatchErrorException(HttpResponseError):
Expand Up @@ -31,6 +31,7 @@
from .._base_client import AccountHostsMixin, get_api_version, extract_batch_part_metadata
from .._authentication import SharedKeyCredentialPolicy
from .._constants import STORAGE_OAUTH_SCOPE
from .._error import RequestTooLargeError
from .._models import BatchErrorException
from .._policies import StorageHosts, StorageHeadersPolicy
from .._sdk_moniker import SDK_MONIKER
@@ -141,6 +142,10 @@ async def _batch_send(
raise ResourceNotFoundError(
message="The resource could not be found", response=response
)
if response.status_code == 413:
raise RequestTooLargeError(
message="The request was too large", response=response
)
if response.status_code != 202:
raise BatchErrorException(
message="There is a failure in the batch operation.",
@@ -157,6 +162,11 @@
raise ResourceNotFoundError(
message="The resource could not be found", response=response
)
if any(p.status_code == 413 for p in parts):
raise RequestTooLargeError(
message="The request was too large", response=response
)

raise BatchErrorException(
message="There is a failure in the batch operation.",
29 changes: 27 additions & 2 deletions sdk/tables/azure-data-tables/tests/test_table_batch.py
@@ -10,6 +10,7 @@

from datetime import datetime, timedelta
from dateutil.tz import tzutc
import os
import sys
import uuid

@@ -20,7 +21,6 @@
from azure.core.exceptions import (
ResourceExistsError,
ResourceNotFoundError,
HttpResponseError,
ClientAuthenticationError
)
from azure.data.tables import (
@@ -34,7 +34,7 @@
UpdateMode,
generate_table_sas,
TableSasPermissions,
TableClient
RequestTooLargeError
)

from _shared.testcase import TableTestCase
@@ -870,6 +870,31 @@ def test_batch_sas_auth(self, tables_storage_account_name, tables_primary_storage_account_key):
finally:
self._tear_down()

@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@pytest.mark.live_test_only # Request bodies are very large
@TablesPreparer()
def test_batch_request_too_large(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:

batch = self.table.create_batch()
entity = {
'PartitionKey': 'pk001',
'Foo': os.urandom(1024*64),
'Bar': os.urandom(1024*64),
'Baz': os.urandom(1024*64)
}
for i in range(50):
entity['RowKey'] = str(i)
batch.create_entity(entity)

with pytest.raises(RequestTooLargeError):
self.table.send_batch(batch)

finally:
self._tear_down()
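For scale: each entity above carries three 64 KiB binary properties, roughly 192 KiB of raw payload before the request body's base64 inflation, so 50 entities put the batch near 10 MiB, well past the Table service's documented 4 MiB transaction payload cap; that is why the live service reliably answers 413.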



class TestTableUnitTest(TableTestCase):
34 changes: 31 additions & 3 deletions sdk/tables/azure-data-tables/tests/test_table_batch_async.py
@@ -8,10 +8,11 @@

import pytest

import uuid
from datetime import datetime, timedelta
from dateutil.tz import tzutc
import os
import sys
import uuid

from devtools_testutils import AzureTestCase

@@ -31,7 +32,8 @@
EdmType,
BatchErrorException,
generate_table_sas,
TableSasPermissions
TableSasPermissions,
RequestTooLargeError
)

from _shared.asynctestcase import AsyncTableTestCase
@@ -760,4 +762,30 @@ async def test_batch_sas_auth(self, tables_storage_account_name, tables_primary_storage_account_key):

assert total_entities == transaction_count
finally:
await self._tear_down()
await self._tear_down()

@pytest.mark.live_test_only # Request bodies are very large
@TablesPreparer()
async def test_batch_request_too_large(self, tables_storage_account_name, tables_primary_storage_account_key):
# Arrange
await self._set_up(tables_storage_account_name, tables_primary_storage_account_key)
try:

batch = self.table.create_batch()
entity = {
'PartitionKey': 'pk001',
'Foo': os.urandom(1024*64),
'Bar': os.urandom(1024*64),
'Baz': os.urandom(1024*64)
}
for i in range(50):
entity['RowKey'] = str(i)
batch.create_entity(entity)

with pytest.raises(RequestTooLargeError):
await self.table.send_batch(batch)

finally:
await self._tear_down()

35 changes: 30 additions & 5 deletions sdk/tables/azure-data-tables/tests/test_table_batch_cosmos.py
@@ -7,8 +7,8 @@
# --------------------------------------------------------------------------
from datetime import datetime
from dateutil.tz import tzutc
import os
import sys
from time import sleep
import uuid

import pytest
@@ -19,8 +19,6 @@
from azure.core.exceptions import (
ResourceExistsError,
ResourceNotFoundError,
HttpResponseError,
ClientAuthenticationError
)
from azure.data.tables import (
EdmType,
@@ -30,7 +28,9 @@
BatchErrorException,
TableServiceClient,
TableEntity,
UpdateMode
UpdateMode,
RequestTooLargeError
)

from _shared.testcase import TableTestCase, SLEEP_DELAY
@@ -66,7 +66,7 @@ def _tear_down(self):
self.ts.delete_table(table_name)
except:
pass
sleep(SLEEP_DELAY)
self.sleep(SLEEP_DELAY)

#--Helpers-----------------------------------------------------------------

@@ -607,3 +607,28 @@ def test_new_delete_nonexistent_entity(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):

finally:
self._tear_down()

@pytest.mark.skipif(sys.version_info < (3, 0), reason="requires Python3")
@pytest.mark.live_test_only # Request bodies are very large
@CosmosPreparer()
def test_batch_request_too_large(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
self._set_up(tables_cosmos_account_name, tables_primary_cosmos_account_key)
try:

batch = self.table.create_batch()
entity = {
'PartitionKey': 'pk001',
'Foo': os.urandom(1024*64),
'Bar': os.urandom(1024*64),
'Baz': os.urandom(1024*64)
}
for i in range(20):
entity['RowKey'] = str(i)
batch.create_entity(entity)

with pytest.raises(RequestTooLargeError):
self.table.send_batch(batch)

finally:
self._tear_down()
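The Cosmos variants loop only 20 times instead of 50: at roughly 192 KiB of binary per entity, 20 entities already approach 4 MiB of raw payload before encoding overhead, which is enough for the Cosmos Table endpoint to reject the transaction with a 413.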
@@ -8,8 +8,8 @@

from datetime import datetime
from dateutil.tz import tzutc
import os
import sys
from time import sleep
import uuid

import pytest
@@ -28,7 +28,7 @@
UpdateMode,
EntityProperty,
EdmType,
BatchErrorException
BatchErrorException,
RequestTooLargeError
)
from azure.data.tables.aio import TableServiceClient

@@ -666,3 +667,28 @@ async def test_new_delete_nonexistent_entity(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):

finally:
await self._tear_down()

@pytest.mark.live_test_only # Request bodies are very large
@CosmosPreparer()
async def test_batch_request_too_large(self, tables_cosmos_account_name, tables_primary_cosmos_account_key):
# Arrange
await self._set_up(tables_cosmos_account_name, tables_primary_cosmos_account_key)
try:

batch = self.table.create_batch()
entity = {
'PartitionKey': 'pk001',
'Foo': os.urandom(1024*64),
'Bar': os.urandom(1024*64),
'Baz': os.urandom(1024*64)
}
for i in range(20):
entity['RowKey'] = str(i)
batch.create_entity(entity)

with pytest.raises(RequestTooLargeError):
await self.table.send_batch(batch)

finally:
await self._tear_down()