diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_tablestoragecs.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_tablestoragecs.py index 08c2b8158c6b..d88cb4b417ca 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_tablestoragecs.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_tablestoragecs.py @@ -2,10 +2,49 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- +from typing import Dict, Optional, Any, Iterable, Union +import datetime +import time +import logging +import calendar +import dateutil.parser +from azure.core import MatchConditions +from azure.eventhub import CheckpointStore # type: ignore # pylint: disable=no-name-in-module +from azure.eventhub.exceptions import OwnershipLostError # type: ignore +from azure.core.exceptions import ( + ResourceModifiedError, + ResourceExistsError, + ResourceNotFoundError, +) +from ._vendor.data.tables import TableClient, UpdateMode +from ._vendor.data.tables._base_client import parse_connection_str -class TableCheckpointStore: +logger = logging.getLogger(__name__) + + +def _utc_to_local(utc_dt): + timestamp = calendar.timegm(utc_dt.timetuple()) + local_dt = datetime.datetime.fromtimestamp(timestamp) + return local_dt.replace(microsecond=utc_dt.microsecond) + + +def _to_timestamp(date): + timestamp = None + if not date: + return timestamp + try: + timestamp = date.timestamp() + except AttributeError: # python2.7 compatible + timestamp = time.mktime(_utc_to_local(date).timetuple()) + timestamp += date.microsecond / 1e6 + return timestamp + + +class TableCheckpointStore(CheckpointStore): """A CheckpointStore that uses Azure Table Storage to store the partition ownership and checkpoint data. + This class implements methods list_ownership, claim_ownership, update_checkpoint and list_checkpoints. + :param str table_account_url: The URI to the storage account. :param table_name: @@ -17,22 +56,306 @@ class TableCheckpointStore: shared access key, or an instance of a TokenCredentials class from azure.identity. If the URL already has a SAS token, specifying an explicit credential will take priority. :keyword str api_version: - The Storage API version to use for requests. Default value is '2019-07-07'. - :keyword str secondary_hostname: - The hostname of the secondary endpoint. + The Storage API version to use for requests. Default value is '2018-03-28'. 
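+ +    Example (an illustrative sketch only; the connection strings, table name, Event Hub name and consumer group below are hypothetical placeholders): + +    .. code-block:: python + +        from azure.eventhub import EventHubConsumerClient +        from azure.eventhub.extensions.checkpointstoretable import TableCheckpointStore + +        checkpoint_store = TableCheckpointStore.from_connection_string( +            "<storage connection string>", table_name="mytable" +        ) +        client = EventHubConsumerClient.from_connection_string( +            "<eventhub connection string>", +            consumer_group="$Default", +            eventhub_name="myeventhub", +            checkpoint_store=checkpoint_store, +        )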
""" - def __init__(self, **kwargs): - pass + def __init__(self, table_account_url, table_name, credential=None, **kwargs): + # type: (str, str, Optional[Any], Any) -> None + self._table_client = kwargs.pop("table_client", None) + if not self._table_client: + api_version = kwargs.pop("api_version", None) + if api_version: + headers = kwargs.get("headers") + if headers: + headers["x-ms-version"] = api_version + else: + kwargs["headers"] = {"x-ms-version": api_version} + self._table_client = TableClient( + table_account_url, table_name, credential=credential, **kwargs + ) + + @classmethod + def from_connection_string(cls, conn_str, table_name, credential=None, **kwargs): + # type: (str, str, Optional[Any], Any) -> TableCheckpointStore + """Create TableCheckpointStore from a storage connection string. + + :param str conn_str: + A connection string to an Azure Storage account. + :param table_name: + The table name. + :type table_name: str + :param credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token, or the connection string already has shared + access key values. The value can be a SAS token string, an account shared access + key, or an instance of a TokenCredentials class from azure.identity. + Credentials provided here will take precedence over those in the connection string. + :keyword str api_version: + The Storage API version to use for requests. Default value is '2018-03-28'. + :returns: A table checkpoint store. + :rtype: ~azure.eventhub.extensions.checkpointstoretable.TableCheckpointStore + """ + endpoint, credential = parse_connection_str( + conn_str=conn_str, credential=None, keyword_args=kwargs + ) + return cls(endpoint, table_name=table_name, credential=credential, **kwargs) + + def __enter__(self): + self._table_client.__enter__() + return self - def list_ownership(self, namespace, eventhub, consumergroup, **kwargs): - pass + def __exit__(self, *args): + self._table_client.__exit__(*args) - def list_checkpoints(self, namespace, eventhub, consumergroup, **kwargs): - pass + @classmethod + def _create_ownership_entity(cls, ownership): + """ + Create a dictionary with the `ownership` attributes. + """ + ownership_entity = { + "PartitionKey": "{} {} {} Ownership".format( + ownership["fully_qualified_namespace"], + ownership["eventhub_name"], + ownership["consumer_group"], + ), + "RowKey": ownership["partition_id"], + "ownerid": ownership["owner_id"], + } + return ownership_entity + + @classmethod + def _create_checkpoint_entity(cls, checkpoint): + """ + Create a dictionary with `checkpoint` attributes. 
+        """ +        checkpoint_entity = { +            "PartitionKey": "{} {} {} Checkpoint".format( +                checkpoint["fully_qualified_namespace"], +                checkpoint["eventhub_name"], +                checkpoint["consumer_group"], +            ), +            "RowKey": checkpoint["partition_id"], +            "offset": checkpoint["offset"], +            "sequencenumber": checkpoint["sequence_number"], +        } +        return checkpoint_entity + +    def _update_ownership(self, ownership, **kwargs): +        """_update_ownership mutates the passed-in ownership.""" +        try: +            ownership_entity = TableCheckpointStore._create_ownership_entity(ownership) +            metadata = self._table_client.update_entity( +                mode=UpdateMode.REPLACE, +                entity=ownership_entity, +                etag=ownership["etag"], +                match_condition=MatchConditions.IfNotModified, +                **kwargs +            ) +            ownership["etag"] = metadata["etag"] +            updated_entity = self._table_client.get_entity( +                partition_key=ownership_entity["PartitionKey"], +                row_key=ownership_entity["RowKey"], +                **kwargs +            ) +            ownership["last_modified_time"] = _to_timestamp( +                updated_entity.metadata.get("timestamp") +            ) +        except (ResourceNotFoundError, ValueError): +            metadata = self._table_client.create_entity( +                entity=ownership_entity, headers={"Prefer": "return-content"}, **kwargs +            ) +            ownership["etag"] = metadata["etag"] +            ownership["last_modified_time"] = _to_timestamp( +                dateutil.parser.isoparse(metadata["content"]["Timestamp"]) +            ) + +    def _claim_one_partition(self, ownership, **kwargs): +        new_ownership = ownership.copy() +        try: +            self._update_ownership(new_ownership, **kwargs) +            return new_ownership +        except (ResourceModifiedError, ResourceExistsError): +            logger.info( +                "EventProcessor instance %r of namespace %r eventhub %r consumer group %r " +                "lost ownership to partition %r", +                new_ownership["owner_id"], +                new_ownership["fully_qualified_namespace"], +                new_ownership["eventhub_name"], +                new_ownership["consumer_group"], +                new_ownership["partition_id"], +            ) +            raise OwnershipLostError() +        except Exception as error:  # pylint:disable=broad-except +            logger.warning( +                "An exception occurred when EventProcessor instance %r claim_ownership for " +                "namespace %r eventhub %r consumer group %r partition %r. " +                "The ownership is now lost. Exception " +                "is %r", +                new_ownership["owner_id"], +                new_ownership["fully_qualified_namespace"], +                new_ownership["eventhub_name"], +                new_ownership["consumer_group"], +                new_ownership["partition_id"], +                error, +            ) +            return new_ownership  # Keep the ownership if an unexpected error happens + +    def list_ownership( +        self, fully_qualified_namespace, eventhub_name, consumer_group, **kwargs +    ): +        # type: (str, str, str, Any) -> Iterable[Dict[str, Any]] +        """Retrieves a complete ownership list from the storage table. + +        :param str fully_qualified_namespace: The fully qualified namespace that the Event Hub belongs to. +            The format is like "<namespace>.servicebus.windows.net". +        :param str eventhub_name: The name of the specific Event Hub the partition ownerships are associated with, +            relative to the Event Hubs namespace that contains it. +        :param str consumer_group: The name of the consumer group the ownerships are associated with. +        :rtype: Iterable[Dict[str, Any]], Iterable of dictionaries containing partition ownership information: +            - `fully_qualified_namespace` (str): The fully qualified namespace that the Event Hub belongs to. +            The format is like "<namespace>.servicebus.windows.net". +            - `eventhub_name` (str): The name of the specific Event Hub the partition ownerships are associated with, +            relative to the Event Hubs namespace that contains it.
+            - `consumer_group` (str): The name of the consumer group the ownerships are associated with. +            - `partition_id` (str): The partition ID which the ownership is created for. +            - `owner_id` (str): A UUID representing the current owner of this partition. +            - `last_modified_time` (float): The last time this ownership was claimed. +            - `etag` (str): The Etag value for the last time this ownership was modified. Optional depending +            on storage implementation. +        """ +        try: +            partition_key = "{} {} {} Ownership".format( +                fully_qualified_namespace, eventhub_name, consumer_group +            ) +            partition_key_filter = "PartitionKey eq '{}'".format(partition_key) +            entities = self._table_client.query_entities(partition_key_filter, **kwargs) +            result = [] +            for entity in entities: +                ownership = { +                    "fully_qualified_namespace": fully_qualified_namespace, +                    "eventhub_name": eventhub_name, +                    "consumer_group": consumer_group, +                    "partition_id": entity[u"RowKey"], +                    "owner_id": entity[u"ownerid"], +                    "last_modified_time": _to_timestamp( +                        entity.metadata.get("timestamp") +                    ), +                    "etag": entity.metadata.get("etag"), +                } +                result.append(ownership) +            return result +        except Exception as error: +            logger.warning( +                "An exception occurred during list_ownership for " +                "namespace %r eventhub %r consumer group %r. " +                "Exception is %r", +                fully_qualified_namespace, +                eventhub_name, +                consumer_group, +                error, +            ) +            raise + +    def list_checkpoints( +        self, fully_qualified_namespace, eventhub_name, consumer_group, **kwargs +    ): +        # type: (str, str, str, Any) -> Iterable[Dict[str, Any]] +        """List the updated checkpoints from the storage table. + +        :param str fully_qualified_namespace: The fully qualified namespace that the Event Hub belongs to. +            The format is like "<namespace>.servicebus.windows.net". +        :param str eventhub_name: The name of the specific Event Hub the checkpoints are associated with, relative to +            the Event Hubs namespace that contains it. +        :param str consumer_group: The name of the consumer group the checkpoints are associated with. +        :rtype: Iterable[Dict[str,Any]], Iterable of dictionaries containing partition checkpoint information: +            - `fully_qualified_namespace` (str): The fully qualified namespace that the Event Hub belongs to. +            The format is like "<namespace>.servicebus.windows.net". +            - `eventhub_name` (str): The name of the specific Event Hub the checkpoints are associated with, +            relative to the Event Hubs namespace that contains it. +            - `consumer_group` (str): The name of the consumer group the checkpoints are associated with. +            - `partition_id` (str): The partition ID which the checkpoint is created for. +            - `sequence_number` (int): The sequence number of the :class:`EventData`. +            - `offset` (str): The offset of the :class:`EventData`.
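+ +        Example call (a sketch; the namespace, Event Hub and consumer group names are hypothetical): + +        .. code-block:: python + +            checkpoints = checkpoint_store.list_checkpoints( +                "myns.servicebus.windows.net", "myeventhub", "$Default" +            ) +            for checkpoint in checkpoints: +                print(checkpoint["partition_id"], checkpoint["sequence_number"])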
+        """ +        partition_key = "{} {} {} Checkpoint".format( +            fully_qualified_namespace, eventhub_name, consumer_group +        ) +        partition_key_filter = "PartitionKey eq '{}'".format(partition_key) +        entities = self._table_client.query_entities(partition_key_filter, **kwargs) +        checkpoints_list = [] +        for entity in entities: +            checkpoint = { +                "fully_qualified_namespace": fully_qualified_namespace, +                "eventhub_name": eventhub_name, +                "consumer_group": consumer_group, +                "partition_id": entity[u"RowKey"], +                "sequence_number": entity[u"sequencenumber"], +                "offset": str(entity[u"offset"]), +            } +            checkpoints_list.append(checkpoint) +        return checkpoints_list   def update_checkpoint(self, checkpoint, **kwargs): -        pass +        # type: (Dict[str, Optional[Union[str, int]]], Any) -> None +        """Updates the checkpoint using the given information for the offset, associated partition and +        consumer group in the storage table. + +        Note: If you plan to implement a custom checkpoint store with the intention of interoperating with +        cross-language Event Hubs SDKs, it is recommended to persist the offset value as an integer. +        :param Dict[str,Any] checkpoint: A dict containing checkpoint information: +            - `fully_qualified_namespace` (str): The fully qualified namespace that the Event Hub belongs to. +            The format is like "<namespace>.servicebus.windows.net". +            - `eventhub_name` (str): The name of the specific Event Hub the checkpoint is associated with, +            relative to the Event Hubs namespace that contains it. +            - `consumer_group` (str): The name of the consumer group the checkpoint is associated with. +            - `partition_id` (str): The partition ID which the checkpoint is created for. +            - `sequence_number` (int): The sequence number of the :class:`EventData` +            the new checkpoint will be associated with. +            - `offset` (str): The offset of the :class:`EventData` +            the new checkpoint will be associated with. +        :rtype: None +        """ +        checkpoint_entity = TableCheckpointStore._create_checkpoint_entity( +            checkpoint +        ) +        entity_name = "{}/{}/{}/checkpoint/{}".format( +            checkpoint["fully_qualified_namespace"], +            checkpoint["eventhub_name"], +            checkpoint["consumer_group"], +            checkpoint["partition_id"], +        ) +        try: +            self._table_client.update_entity( +                mode=UpdateMode.REPLACE, entity=checkpoint_entity, **kwargs +            ) +        except ResourceNotFoundError: +            logger.info( +                "Creating checkpoint entity %r because it does not exist in the table yet.", +                entity_name, +            ) +            self._table_client.create_entity(entity=checkpoint_entity, **kwargs) + +    def claim_ownership(self, ownership_list, **kwargs): +        # type: (Iterable[Dict[str, Any]], Any) -> Iterable[Dict[str, Any]] +        """Tries to claim ownership for a list of specified partitions. + +        :param Iterable[Dict[str,Any]] ownership_list: Iterable of dictionaries containing all the ownerships to claim. +        :rtype: Iterable[Dict[str,Any]], Iterable of dictionaries containing partition ownership information: +            - `fully_qualified_namespace` (str): The fully qualified namespace that the Event Hub belongs to. +            The format is like "<namespace>.servicebus.windows.net". +            - `eventhub_name` (str): The name of the specific Event Hub the partition ownerships are associated with, +            relative to the Event Hubs namespace that contains it. +            - `consumer_group` (str): The name of the consumer group the ownerships are associated with. +            - `partition_id` (str): The partition ID which the ownership is created for. +            - `owner_id` (str): A UUID representing the owner attempting to claim this partition. +            - `last_modified_time` (float): The last time this ownership was claimed.
+            - `etag` (str): The Etag value for the last time this ownership was modified. Optional depending +            on storage implementation. +        """ +        gathered_results = [] +        for x in ownership_list: +            gathered_results.append(self._claim_one_partition(x, **kwargs)) +        return gathered_results  -    def claim_ownership(self, ownershiplist, **kwargs): -        pass +    def close(self): +        self._table_client.__exit__() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/__init__.py new file mode 100644 index 000000000000..0d1f7edf5dc6 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/__init__.py new file mode 100644 index 000000000000..0d1f7edf5dc6 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__)  # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/__init__.py new file mode 100644 index 000000000000..c989f9191b88 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/__init__.py @@ -0,0 +1,52 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information.
+# -------------------------------------------------------------------------- +from ._entity import TableEntity, EntityProperty, EdmType +from ._error import RequestTooLargeError, TableTransactionError, TableErrorCode +from ._table_shared_access_signature import generate_table_sas, generate_account_sas +from ._table_client import TableClient +from ._table_service_client import TableServiceClient +from ._models import ( + TableAccessPolicy, + TableMetrics, + TableRetentionPolicy, + TableAnalyticsLogging, + TableSasPermissions, + TableCorsRule, + UpdateMode, + SASProtocol, + TableItem, + ResourceTypes, + AccountSasPermissions, + TransactionOperation +) +from ._version import VERSION + +__version__ = VERSION + +__all__ = [ + "TableClient", + "TableServiceClient", + "ResourceTypes", + "AccountSasPermissions", + "TableErrorCode", + "TableSasPermissions", + "TableAccessPolicy", + "TableAnalyticsLogging", + "TableMetrics", + "generate_account_sas", + "TableCorsRule", + "UpdateMode", + "TableItem", + "TableEntity", + "EntityProperty", + "EdmType", + "TableRetentionPolicy", + "generate_table_sas", + "SASProtocol", + "TableTransactionError", + "TransactionOperation", + "RequestTooLargeError", +] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_authentication.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_authentication.py new file mode 100644 index 000000000000..95d80dfd0844 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_authentication.py @@ -0,0 +1,135 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import logging +from typing import TYPE_CHECKING + +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse # type: ignore + +from azure.core.exceptions import ClientAuthenticationError +from azure.core.pipeline.policies import SansIOHTTPPolicy + +try: + from azure.core.pipeline.transport import AsyncHttpTransport +except ImportError: + AsyncHttpTransport = None # type: ignore + +try: + from yarl import URL +except ImportError: + pass + +from ._common_conversion import ( + _sign_string, +) + +from ._error import ( + _wrap_exception, +) + +if TYPE_CHECKING: + from azure.core.pipeline import PipelineRequest # pylint: disable=ungrouped-imports + + +logger = logging.getLogger(__name__) + + +class AzureSigningError(ClientAuthenticationError): + """ + Represents a fatal error when attempting to sign a request. + In general, the cause of this exception is user error. For example, the given account key is not valid. + Please visit https://docs.microsoft.com/en-us/azure/storage/common/storage-create-storage-account for more info. 
+ """ + + +# pylint: disable=no-self-use +class SharedKeyCredentialPolicy(SansIOHTTPPolicy): + def __init__(self, credential, is_emulated=False): + self._credential = credential + self.is_emulated = is_emulated + + def _get_headers(self, request, headers_to_sign): + headers = dict( + (name.lower(), value) for name, value in request.headers.items() if value + ) + if "content-length" in headers and headers["content-length"] == "0": + del headers["content-length"] + return "\n".join(headers.get(x, "") for x in headers_to_sign) + "\n" + + def _get_verb(self, request): + return request.method + "\n" + + def _get_canonicalized_resource(self, request): + uri_path = urlparse(request.http_request.url).path + try: + if ( + isinstance(request.context.transport, AsyncHttpTransport) + or isinstance( + getattr(request.context.transport, "_transport", None), + AsyncHttpTransport, + ) + or isinstance( + getattr( + getattr(request.context.transport, "_transport", None), + "_transport", + None, + ), + AsyncHttpTransport, + ) + ): + uri_path = URL(uri_path) + return "/" + self._credential.named_key.name + str(uri_path) + except TypeError: + pass + return "/" + self._credential.named_key.name + uri_path + + def _get_canonicalized_headers(self, request): + string_to_sign = "" + x_ms_headers = [] + for name, value in request.headers.items(): + if name.startswith("x-ms-"): + x_ms_headers.append((name.lower(), value)) + x_ms_headers.sort() + for name, value in x_ms_headers: + if value is not None: + string_to_sign += "".join([name, ":", value, "\n"]) + return string_to_sign + + def _add_authorization_header(self, request, string_to_sign): + try: + signature = _sign_string(self._credential.named_key.key, string_to_sign) + auth_string = "SharedKey " + self._credential.named_key.name + ":" + signature + request.headers["Authorization"] = auth_string + except Exception as ex: + # Wrap any error that occurred as signing error + # Doing so will clarify/locate the source of problem + raise _wrap_exception(ex, AzureSigningError) + + def on_request(self, request): + # type: (PipelineRequest) -> None + self.sign_request(request) + + def sign_request(self, request): + string_to_sign = ( + self._get_verb(request.http_request) + + self._get_headers( + request.http_request, + ["content-md5", "content-type", "x-ms-date"], + ) + + self._get_canonicalized_resource(request) + + self._get_canonicalized_resource_query(request.http_request) + ) + self._add_authorization_header(request.http_request, string_to_sign) + logger.debug("String_to_sign=%s", string_to_sign) + + def _get_canonicalized_resource_query(self, request): + for name, value in request.query.items(): + if name == "comp": + return "?comp=" + value + return "" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_base_client.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_base_client.py new file mode 100644 index 000000000000..58c83d844b96 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_base_client.py @@ -0,0 +1,417 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +from typing import Dict, Optional, Any, List, Mapping, Union, TYPE_CHECKING +from uuid import uuid4 +try: + from urllib.parse import parse_qs, quote, urlparse +except ImportError: + from urlparse import parse_qs, urlparse # type: ignore + from urllib2 import quote # type: ignore + +from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential +from azure.core.utils import parse_connection_string +from azure.core.pipeline.transport import ( + HttpTransport, + HttpRequest, +) +from azure.core.pipeline.policies import ( + RedirectPolicy, + ContentDecodePolicy, + BearerTokenCredentialPolicy, + ProxyPolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, + UserAgentPolicy, + AzureSasCredentialPolicy, + NetworkTraceLoggingPolicy, + CustomHookPolicy, + RequestIdPolicy, +) + +from ._generated import AzureTable +from ._common_conversion import _is_cosmos_endpoint +from ._shared_access_signature import QueryStringConstants +from ._constants import ( + STORAGE_OAUTH_SCOPE, + SERVICE_HOST_BASE, +) +from ._error import RequestTooLargeError, TableTransactionError, _decode_error +from ._models import LocationMode +from ._authentication import SharedKeyCredentialPolicy +from ._policies import ( + CosmosPatchTransformPolicy, + StorageHeadersPolicy, + StorageHosts, + TablesRetryPolicy, +) +from ._sdk_moniker import SDK_MONIKER + +if TYPE_CHECKING: + from azure.core.credentials import TokenCredential + +_SUPPORTED_API_VERSIONS = ["2019-02-02", "2019-07-07"] + + +def get_api_version(kwargs, default): + # type: (Dict[str, Any], str) -> str + api_version = kwargs.pop("api_version", None) + if api_version and api_version not in _SUPPORTED_API_VERSIONS: + versions = "\n".join(_SUPPORTED_API_VERSIONS) + raise ValueError( + "Unsupported API version '{}'. Please select from:\n{}".format( + api_version, versions + ) + ) + return api_version or default + + +class AccountHostsMixin(object): # pylint: disable=too-many-instance-attributes + def __init__( + self, + account_url, # type: Any + credential=None, # type: Optional[Union[AzureNamedKeyCredential, AzureSasCredential, "TokenCredential"]] + **kwargs # type: Any + ): + # type: (...) -> None + try: + if not account_url.lower().startswith("http"): + account_url = "https://" + account_url + except AttributeError: + raise ValueError("Account URL must be a string.") + parsed_url = urlparse(account_url.rstrip("/")) + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(account_url)) + + _, sas_token = parse_query(parsed_url.query) + if not sas_token and not credential: + raise ValueError( + "You need to provide either an AzureSasCredential or AzureNamedKeyCredential" + ) + self._query_str, credential = format_query_string(sas_token, credential) + self._location_mode = kwargs.get("location_mode", LocationMode.PRIMARY) + self._hosts = kwargs.get("_hosts") + self.scheme = parsed_url.scheme + self._cosmos_endpoint = _is_cosmos_endpoint(parsed_url) + if ".core." in parsed_url.netloc or ".cosmos." 
in parsed_url.netloc: + account = parsed_url.netloc.split(".table.core.") + if "cosmos" in parsed_url.netloc: + account = parsed_url.netloc.split(".table.cosmos.") + self.account_name = account[0] if len(account) > 1 else None + else: + path_account_name = parsed_url.path.split("/") + if len(path_account_name) > 1: + self.account_name = path_account_name[1] + account = [self.account_name, parsed_url.netloc] + else: + # If format doesn't fit Azurite, default to standard parsing + account = parsed_url.netloc.split(".table.core.") + self.account_name = account[0] if len(account) > 1 else None + + secondary_hostname = None + self.credential = credential + if self.scheme.lower() != "https" and hasattr(self.credential, "get_token"): + raise ValueError("Token credential is only supported with HTTPS.") + if hasattr(self.credential, "named_key"): + self.account_name = self.credential.named_key.name # type: ignore + secondary_hostname = "{}-secondary.table.{}".format( + self.credential.named_key.name, SERVICE_HOST_BASE # type: ignore + ) + + if not self._hosts: + if len(account) > 1: + secondary_hostname = parsed_url.netloc.replace( + account[0], account[0] + "-secondary" + ) + parsed_url.path.replace( + account[0], account[0] + "-secondary" + ).rstrip("/") + if kwargs.get("secondary_hostname"): + secondary_hostname = kwargs["secondary_hostname"] + primary_hostname = (parsed_url.netloc + parsed_url.path).rstrip("/") + self._hosts = { + LocationMode.PRIMARY: primary_hostname, + LocationMode.SECONDARY: secondary_hostname, + } + self._credential_policy = None # type: ignore + self._configure_credential(self.credential) # type: ignore + self._policies = self._configure_policies(hosts=self._hosts, **kwargs) # type: ignore + if self._cosmos_endpoint: + self._policies.insert(0, CosmosPatchTransformPolicy()) + + @property + def url(self): + """The full endpoint URL to this entity, including SAS token if used. + + This could be either the primary endpoint, + or the secondary endpoint depending on the current :func:`location_mode`. + """ + return self._format_url(self._hosts[self._location_mode]) + + @property + def _primary_endpoint(self): + """The full primary endpoint URL. + + :type: str + """ + return self._format_url(self._hosts[LocationMode.PRIMARY]) + + @property + def _primary_hostname(self): + """The hostname of the primary endpoint. + + :type: str + """ + return self._hosts[LocationMode.PRIMARY] + + @property + def _secondary_endpoint(self): + """The full secondary endpoint URL if configured. + + If not available a ValueError will be raised. To explicitly specify a secondary hostname, use the optional + `secondary_hostname` keyword argument on instantiation. + + :type: str + :raise ValueError: + """ + if not self._hosts[LocationMode.SECONDARY]: + raise ValueError("No secondary host configured.") + return self._format_url(self._hosts[LocationMode.SECONDARY]) + + @property + def _secondary_hostname(self): + """The hostname of the secondary endpoint. + + If not available this will be None. To explicitly specify a secondary hostname, use the optional + `secondary_hostname` keyword argument on instantiation. + + :type: str or None + """ + return self._hosts[LocationMode.SECONDARY] + + @property + def api_version(self): + """The version of the Storage API used for requests. 
+ +        :type: str +        """ +        return self._client._config.version  # pylint: disable=protected-access + + +class TablesBaseClient(AccountHostsMixin): + +    def __init__(  # pylint: disable=missing-client-constructor-parameter-credential +        self, +        endpoint,  # type: str +        **kwargs  # type: Any +    ): +        # type: (...) -> None +        credential = kwargs.pop('credential', None) +        super(TablesBaseClient, self).__init__(endpoint, credential=credential, **kwargs) +        self._client = AzureTable( +            self.url, +            policies=kwargs.pop('policies', self._policies), +            **kwargs +        ) +        self._client._config.version = get_api_version(kwargs, self._client._config.version)  # pylint: disable=protected-access + +    def __enter__(self): +        self._client.__enter__() +        return self + +    def __exit__(self, *args): +        self._client.__exit__(*args) + +    def _configure_policies(self, **kwargs): +        return [ +            RequestIdPolicy(**kwargs), +            StorageHeadersPolicy(**kwargs), +            UserAgentPolicy(sdk_moniker=SDK_MONIKER, **kwargs), +            ProxyPolicy(**kwargs), +            self._credential_policy, +            ContentDecodePolicy(response_encoding="utf-8"), +            RedirectPolicy(**kwargs), +            StorageHosts(**kwargs), +            TablesRetryPolicy(**kwargs), +            CustomHookPolicy(**kwargs), +            NetworkTraceLoggingPolicy(**kwargs), +            DistributedTracingPolicy(**kwargs), +            HttpLoggingPolicy(**kwargs), +        ] + +    def _configure_credential(self, credential): +        # type: (Any) -> None +        if hasattr(credential, "get_token"): +            self._credential_policy = BearerTokenCredentialPolicy(  # type: ignore +                credential, STORAGE_OAUTH_SCOPE +            ) +        elif isinstance(credential, SharedKeyCredentialPolicy): +            self._credential_policy = credential  # type: ignore +        elif isinstance(credential, AzureSasCredential): +            self._credential_policy = AzureSasCredentialPolicy(credential)  # type: ignore +        elif isinstance(credential, AzureNamedKeyCredential): +            self._credential_policy = SharedKeyCredentialPolicy(credential)  # type: ignore +        elif credential is not None: +            raise TypeError("Unsupported credential: {}".format(credential)) + +    def _batch_send(self, *reqs, **kwargs): +        # type: (List[HttpRequest], Any) -> List[Mapping[str, Any]] +        """Given a series of requests, do a Storage batch call.""" +        # Only the headers policy is applied to the sub-requests of the batch +        policies = [StorageHeadersPolicy()] + +        changeset = HttpRequest("POST", None)  # type: ignore +        changeset.set_multipart_mixed( +            *reqs, policies=policies, boundary="changeset_{}".format(uuid4())  # type: ignore +        ) +        request = self._client._client.post(  # pylint: disable=protected-access +            url="https://{}/$batch".format(self._primary_hostname), +            headers={ +                "x-ms-version": self.api_version, +                "DataServiceVersion": "3.0", +                "MaxDataServiceVersion": "3.0;NetFx", +                'Content-Type': 'application/json', +                'Accept': 'application/json' +            }, +        ) +        request.set_multipart_mixed( +            changeset, +            policies=policies, +            enforce_https=False, +            boundary="batch_{}".format(uuid4()), +        ) +        pipeline_response = self._client._client._pipeline.run(request, **kwargs)  # pylint: disable=protected-access +        response = pipeline_response.http_response +        if response.status_code == 413: +            raise _decode_error( +                response, +                error_message="The transaction request was too large", +                error_type=RequestTooLargeError) +        if response.status_code != 202: +            raise _decode_error(response) + +        parts = list(response.parts()) +        error_parts = [p for p in parts if not 200 <= p.status_code < 300] +        if any(error_parts): +            if error_parts[0].status_code == 413: +                raise _decode_error( +                    response, +                    error_message="The transaction request was too large",
error_type=RequestTooLargeError) +            raise _decode_error( +                response=error_parts[0], +                error_type=TableTransactionError +            ) +        return [extract_batch_part_metadata(p) for p in parts] + +    def close(self): +        # type: () -> None +        """Close the sockets opened by the client. +        This method need not be called when the client is used as a context manager. +        """ +        self._client.close() + + +class TransportWrapper(HttpTransport): +    """Wrapper class that ensures that an inner client created +    by a `get_client` method does not close the outer transport for the parent +    when used in a context manager. +    """ +    def __init__(self, transport): +        self._transport = transport + +    def send(self, request, **kwargs): +        return self._transport.send(request, **kwargs) + +    def open(self): +        pass + +    def close(self): +        pass + +    def __enter__(self): +        pass + +    def __exit__(self, *args):  # pylint: disable=arguments-differ +        pass + + +def parse_connection_str(conn_str, credential, keyword_args): +    conn_settings = parse_connection_string(conn_str) +    primary = None +    secondary = None +    if not credential: +        try: +            credential = AzureNamedKeyCredential(name=conn_settings["accountname"], key=conn_settings["accountkey"]) +        except KeyError: +            credential = conn_settings.get("sharedaccesssignature", None) +            if not credential: +                raise ValueError("Connection string missing required connection details.") +            credential = AzureSasCredential(credential) +    primary = conn_settings.get("tableendpoint") +    secondary = conn_settings.get("tablesecondaryendpoint") +    if not primary: +        if secondary: +            raise ValueError("Connection string specifies only secondary endpoint.") +        try: +            primary = "{}://{}.table.{}".format( +                conn_settings["defaultendpointsprotocol"], +                conn_settings["accountname"], +                conn_settings["endpointsuffix"], +            ) +            secondary = "{}-secondary.table.{}".format( +                conn_settings["accountname"], conn_settings["endpointsuffix"] +            ) +        except KeyError: +            pass + +    if not primary: +        try: +            primary = "https://{}.table.{}".format( +                conn_settings["accountname"], +                conn_settings.get("endpointsuffix", SERVICE_HOST_BASE), +            ) +        except KeyError: +            raise ValueError("Connection string missing required connection details.") + +    if "secondary_hostname" not in keyword_args: +        keyword_args["secondary_hostname"] = secondary + +    return primary, credential + + +def extract_batch_part_metadata(response_part): +    metadata = {} +    if 'Etag' in response_part.headers: +        metadata['etag'] = response_part.headers['Etag'] +    return metadata + + +def format_query_string(sas_token, credential): +    query_str = "?"
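+    # Editorial note (hypothetical values): given an account URL such as +    # "https://myaccount.table.core.windows.net/?sv=2019-02-02&sig=abc", +    # parse_query() extracts "sv=2019-02-02&sig=abc" as the SAS token, and this +    # function returns it here as "?sv=2019-02-02&sig=abc" with no credential object; +    # if a credential object was supplied instead, the query string returned is "".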
+    if sas_token and isinstance(credential, AzureSasCredential): +        raise ValueError( +            "You cannot use AzureSasCredential when the resource URI also contains a Shared Access Signature.") +    if sas_token and not credential: +        query_str += sas_token +    elif credential: +        return "", credential +    return query_str.rstrip("?&"), None + + +def parse_query(query_str): +    sas_values = QueryStringConstants.to_list() +    parsed_query = {k: v[0] for k, v in parse_qs(query_str).items()} +    sas_params = [ +        "{}={}".format(k, quote(v, safe="")) +        for k, v in parsed_query.items() +        if k in sas_values +    ] +    sas_token = None +    if sas_params: +        sas_token = "&".join(sas_params) + +    snapshot = parsed_query.get("snapshot") or parsed_query.get("sharesnapshot") +    return snapshot, sas_token diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_common_conversion.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_common_conversion.py new file mode 100644 index 000000000000..180dc2193a0c --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_common_conversion.py @@ -0,0 +1,103 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import base64 +import hashlib +import datetime +import hmac +from sys import version_info +import six + + +class UTC(datetime.tzinfo): +    """Time Zone info for handling UTC""" + +    def utcoffset(self, dt): +        """UTC offset for UTC is 0.""" +        return datetime.timedelta(0) + +    def tzname(self, dt): +        """Timestamp representation.""" +        return "Z" + +    def dst(self, dt): +        """No daylight saving for UTC.""" +        return datetime.timedelta(0) + + +try: +    from datetime import timezone +    TZ_UTC = timezone.utc  # type: ignore +except ImportError: +    TZ_UTC = UTC()  # type: ignore + + +if version_info < (3,): + +    def _str(value): +        if isinstance(value, unicode):  # pylint: disable=undefined-variable +            return value.encode("utf-8") + +        return str(value) +else: +    _str = str + + +def _to_str(value): +    return _str(value) if value is not None else None + + +def _to_utc_datetime(value): +    try: +        value = value.astimezone(TZ_UTC) +    except ValueError: +        # Before Python 3.8, this raised for a naive datetime.
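+        # e.g. a naive value like datetime.datetime(2021, 1, 1) carries no tzinfo; +        # where astimezone() raises, the naive value is formatted as-is below.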
+ pass + try: + return value.strftime("%Y-%m-%dT%H:%M:%S.%fZ") + except ValueError: + return value.strftime("%Y-%m-%dT%H:%M:%SZ") + + +def _encode_base64(data): + if isinstance(data, six.text_type): + data = data.encode("utf-8") + encoded = base64.b64encode(data) + return encoded.decode("utf-8") + + +def _decode_base64_to_bytes(data): + if isinstance(data, six.text_type): + data = data.encode("utf-8") + return base64.b64decode(data) + + +def _sign_string(key, string_to_sign, key_is_base64=True): + if key_is_base64: + key = _decode_base64_to_bytes(key) + else: + if isinstance(key, six.text_type): + key = key.encode("utf-8") + if isinstance(string_to_sign, six.text_type): + string_to_sign = string_to_sign.encode("utf-8") + signed_hmac_sha256 = hmac.HMAC(key, string_to_sign, hashlib.sha256) + digest = signed_hmac_sha256.digest() + encoded_digest = _encode_base64(digest) + return encoded_digest + + +def _is_cosmos_endpoint(url): + if ".table.cosmosdb." in url.hostname: + return True + if ".table.cosmos." in url.hostname: + return True + if url.hostname == "localhost" and url.port != 10002: + return True + return False + + +def _transform_patch_to_cosmos_post(request): + request.method = "POST" + request.headers["X-HTTP-Method"] = "MERGE" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_constants.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_constants.py new file mode 100644 index 000000000000..924b4c01bcf1 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_constants.py @@ -0,0 +1,19 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from ._generated._version import VERSION + +# default values for common package, in case it is used directly +DEFAULT_X_MS_VERSION = "2018-03-28" +X_MS_VERSION = VERSION + +# Live ServiceClient URLs +SERVICE_HOST_BASE = "core.windows.net" + +STORAGE_OAUTH_SCOPE = "https://storage.azure.com/.default" + +NEXT_TABLE_NAME = "x-ms-continuation-NextTableName" +NEXT_PARTITION_KEY = "x-ms-continuation-NextPartitionKey" +NEXT_ROW_KEY = "x-ms-continuation-NextRowKey" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_deserialize.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_deserialize.py new file mode 100644 index 000000000000..e81c5ada9a85 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_deserialize.py @@ -0,0 +1,276 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- +from typing import Union, Dict, Any, Optional + +from uuid import UUID +import logging +import datetime + +import six + +from ._entity import EntityProperty, EdmType, TableEntity +from ._common_conversion import _decode_base64_to_bytes, TZ_UTC + + +_LOGGER = logging.getLogger(__name__) + +try: +    from urllib.parse import quote +except ImportError: +    from urllib2 import quote  # type: ignore + + +class TablesEntityDatetime(datetime.datetime): + +    @property +    def tables_service_value(self): +        try: +            return self._service_value +        except AttributeError: +            return "" + + +def url_quote(url): +    return quote(url) + + +def get_enum_value(value): +    if value is None or value in ["None", ""]: +        return None +    try: +        return value.value +    except AttributeError: +        return value + + +def _from_entity_binary(value): +    # type: (str) -> bytes +    return _decode_base64_to_bytes(value) + + +def _from_entity_int32(value): +    # type: (str) -> int +    return int(value) + + +def _from_entity_int64(value): +    # type: (str) -> EntityProperty +    return EntityProperty(int(value), EdmType.INT64) + + +def _from_entity_datetime(value): +    # Cosmos returns this with a decimal point that throws an error on deserialization +    cleaned_value = clean_up_dotnet_timestamps(value) +    try: +        dt_obj = TablesEntityDatetime.strptime(cleaned_value, "%Y-%m-%dT%H:%M:%S.%fZ").replace( +            tzinfo=TZ_UTC +        ) +    except ValueError: +        dt_obj = TablesEntityDatetime.strptime(cleaned_value, "%Y-%m-%dT%H:%M:%SZ").replace( +            tzinfo=TZ_UTC +        ) +    dt_obj._service_value = value  # pylint:disable=protected-access +    return dt_obj + + +def clean_up_dotnet_timestamps(value): +    # .NET has more decimal places than Python supports in datetime objects; this truncates +    # values after 6 decimal places. +    value = value.split(".") +    ms = "" +    if len(value) == 2: +        ms = value[-1].replace("Z", "") +        if len(ms) > 6: +            ms = ms[:6] +        ms = ms + "Z" +        return ".".join([value[0], ms]) + +    return value[0] + + +def deserialize_iso(value): +    if not value: +        return value +    return _from_entity_datetime(value) + + +def _from_entity_guid(value): +    return UUID(value) + + +def _from_entity_str(value): +    # type: (Union[str, bytes]) -> str +    if isinstance(value, six.binary_type): +        return value.decode('utf-8') +    return value + + +_EDM_TYPES = [ +    EdmType.BINARY, +    EdmType.INT64, +    EdmType.GUID, +    EdmType.DATETIME, +    EdmType.STRING, +    EdmType.INT32, +    EdmType.DOUBLE, +    EdmType.BOOLEAN, +] + +_ENTITY_TO_PYTHON_CONVERSIONS = { +    EdmType.BINARY: _from_entity_binary, +    EdmType.INT32: _from_entity_int32, +    EdmType.INT64: _from_entity_int64, +    EdmType.DOUBLE: float, +    EdmType.DATETIME: _from_entity_datetime, +    EdmType.GUID: _from_entity_guid, +    EdmType.STRING: _from_entity_str, +} + + +def _convert_to_entity(entry_element): +    """Convert json response to entity.
+    The entity format is: +    { +        "Address":"Mountain View", +        "Age":23, +        "AmountDue":200.23, +        "CustomerCode@odata.type":"Edm.Guid", +        "CustomerCode":"c9da6455-213d-42c9-9a79-3e9149a57833", +        "CustomerSince@odata.type":"Edm.DateTime", +        "CustomerSince":"2008-07-10T00:00:00", +        "IsActive":true, +        "NumberOfOrders@odata.type":"Edm.Int64", +        "NumberOfOrders":"255", +        "PartitionKey":"mypartitionkey", +        "RowKey":"myrowkey" +    } +    """ +    entity = TableEntity() + +    properties = {} +    edmtypes = {} +    odata = {} + +    for name, value in entry_element.items(): +        if name.startswith("odata."): +            odata[name[6:]] = value +        elif name.endswith("@odata.type"): +            edmtypes[name[:-11]] = value +        else: +            properties[name] = value + +    # Partition key is a known property +    partition_key = properties.pop("PartitionKey", None) +    if partition_key: +        entity["PartitionKey"] = partition_key + +    # Row key is a known property +    row_key = properties.pop("RowKey", None) +    if row_key: +        entity["RowKey"] = row_key + +    # Timestamp is a known property +    timestamp = properties.pop("Timestamp", None) + +    for name, value in properties.items(): +        mtype = edmtypes.get(name) + +        # Add type for Int32/64 +        if isinstance(value, int) and mtype is None: +            mtype = EdmType.INT32 + +            if value >= 2 ** 31 or value < (-(2 ** 31)): +                mtype = EdmType.INT64 + +        # Add type for String +        try: +            if isinstance(value, unicode) and mtype is None:  # type: ignore +                mtype = EdmType.STRING +        except NameError: +            if isinstance(value, str) and mtype is None: +                mtype = EdmType.STRING + +        # no type info, property should parse automatically +        if not mtype: +            entity[name] = value +        elif mtype in [EdmType.STRING, EdmType.INT32]: +            entity[name] = value +        else:  # need an object to hold the property +            conv = _ENTITY_TO_PYTHON_CONVERSIONS.get(mtype) +            if conv is not None: +                new_property = conv(value) +            else: +                new_property = EntityProperty(value, mtype) +            entity[name] = new_property + +    # extract etag from entry +    etag = odata.pop("etag", None) +    odata.pop("metadata", None) +    if timestamp: +        if not etag: +            etag = "W/\"datetime'" + url_quote(timestamp) + "'\"" +        timestamp = _from_entity_datetime(timestamp) +    odata.update({'etag': etag, 'timestamp': timestamp}) +    entity._metadata = odata  # pylint: disable=protected-access +    return entity + + +def _extract_etag(response): +    """ Extracts the etag from the response headers. """ +    if response and response.headers: +        return response.headers.get("etag") + +    return None + + +def _extract_continuation_token(continuation_token): +    """Extract list entity continuation headers from token. + +    :param dict(str,str) continuation_token: The listing continuation token.
+    :returns: The next partition key and next row key in a tuple +    :rtype: (str,str) +    """ +    if not continuation_token: +        return None, None +    try: +        return continuation_token.get("PartitionKey"), continuation_token.get("RowKey") +    except AttributeError: +        raise ValueError("Invalid continuation token format.") + + +def _normalize_headers(headers): +    normalized = {} +    for key, value in headers.items(): +        if key.startswith("x-ms-"): +            key = key[5:] +        normalized[key.lower().replace("-", "_")] = get_enum_value(value) +    return normalized + + +def _return_headers_and_deserialized( +    response, deserialized, response_headers +):  # pylint: disable=unused-argument +    return _normalize_headers(response_headers), deserialized + + +def _return_context_and_deserialized( +    response, deserialized, response_headers +):  # pylint: disable=unused-argument +    return response.context['location_mode'], deserialized, response_headers + + +def _trim_service_metadata(metadata, content=None): +    # type: (Dict[str, str], Optional[Dict[str, Any]]) -> Dict[str, Any] +    result = { +        "date": metadata.pop("date", None), +        "etag": metadata.pop("etag", None), +        "version": metadata.pop("version", None), +    } +    preference = metadata.pop('preference_applied', None) +    if preference: +        result["preference_applied"] = preference +        result["content"] = content  # type: ignore +    return result diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_entity.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_entity.py new file mode 100644 index 000000000000..90a8febdbbb8 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_entity.py @@ -0,0 +1,73 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from enum import Enum +from typing import Any, Dict, Union, NamedTuple + + +class TableEntity(dict): +    """ +    An Entity dictionary with additional metadata +
    +    """ +    _metadata = {}  # type: Dict[str, Any] + +    @property +    def metadata(self): +        # type: () -> Dict[str, Any] +        """The metadata of the entity, such as etag and timestamp. +        :return: Dict of entity metadata +        :rtype: Dict[str, Any] +        """ +        return self._metadata + + +class EdmType(str, Enum): +    """ +    Used by :class:`~.EntityProperty` to represent the type of the entity property +    to be stored by the Table service. +    """ + +    BINARY = "Edm.Binary" +    """ Represents byte data. This type will be inferred for Python bytes. """ + +    INT64 = "Edm.Int64" +    """ Represents a number between -(2^63) and 2^63 - 1. This is the default type for Python numbers. """ + +    GUID = "Edm.Guid" +    """ Represents a GUID. This type will be inferred for uuid.UUID. """ + +    DATETIME = "Edm.DateTime" +    """ Represents a date. This type will be inferred for Python datetime objects. """ + +    STRING = "Edm.String" +    """ Represents a string. This type will be inferred for Python strings. """ + +    INT32 = "Edm.Int32" +    """ Represents a number between -(2^31) and 2^31 - 1. Must be specified or numbers will default to INT64. """ + +    DOUBLE = "Edm.Double" +    """ Represents a double. This type will be inferred for Python floating point numbers.
""" + + BOOLEAN = "Edm.Boolean" + """ Represents a boolean. This type will be inferred for Python bools. """ + + +EntityProperty = NamedTuple("EntityProperty", [("value", Any), ("edm_type", Union[str, EdmType])]) +""" +An entity property. Used to explicitly set :class:`~EdmType` when necessary. + +Values which require explicit typing are GUID, INT64, and BINARY. Other EdmTypes +may be explicitly create as EntityProperty objects but need not be. For example, +the below with both create STRING typed properties on the entity:: + entity = TableEntity() + entity.a = 'b' + entity.x = EntityProperty('y', EdmType.STRING) + +:param value: +:type value: Any +:param edm_type: Type of the value +:type edm_type: str or :class:`~azure.data.tables.EdmType` +""" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_error.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_error.py new file mode 100644 index 000000000000..3cfff0bebf4f --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_error.py @@ -0,0 +1,251 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import sys +from re import match +from enum import Enum + +from azure.core.exceptions import ( + HttpResponseError, + ResourceNotFoundError, + ResourceModifiedError, + ResourceExistsError, + ClientAuthenticationError, + DecodeError, +) +from azure.core.pipeline.policies import ContentDecodePolicy + +if sys.version_info < (3,): + + def _str(value): + if isinstance(value, unicode): # pylint: disable=undefined-variable + return value.encode("utf-8") + + return str(value) +else: + _str = str + + + +def _to_str(value): + return _str(value) if value is not None else None + + +_ERROR_TYPE_NOT_SUPPORTED = "Type not supported when sending data to the service: {0}." +_ERROR_VALUE_TOO_LARGE = "{0} is too large to be cast to type {1}." +_ERROR_UNKNOWN = "Unknown error ({0})" +_ERROR_VALUE_NONE = "{0} should not be None." +_ERROR_UNKNOWN_KEY_WRAP_ALGORITHM = "Unknown key wrap algorithm." + + +def _validate_not_none(param_name, param): + if param is None: + raise ValueError(_ERROR_VALUE_NONE.format(param_name)) + + +def _wrap_exception(ex, desired_type): + msg = "" + if len(ex.args) > 0: + msg = ex.args[0] + if sys.version_info >= (3,): + # Automatic chaining in Python 3 means we keep the trace + return desired_type(msg) + + # There isn't a good solution in 2 for keeping the stack trace + # in general, or that will not result in an error in 3 + # However, we can keep the previous error type and message + # TODO: In the future we will log the trace + return desired_type("{}: {}".format(ex.__class__.__name__, msg)) + + +def _validate_table_name(table_name): + if match("^[a-zA-Z]{1}[a-zA-Z0-9]{2,62}$", table_name) is None: + raise ValueError( + "Table names must be alphanumeric, cannot begin with a number, and must be between 3-63 characters long." 
+        ) + + +def _decode_error(response, error_message=None, error_type=None, **kwargs): +    error_code = response.headers.get("x-ms-error-code") +    additional_data = {} +    try: +        error_body = ContentDecodePolicy.deserialize_from_http_generics(response) +        if isinstance(error_body, dict): +            for info in error_body.get("odata.error", {}): +                if info == "code": +                    error_code = error_body["odata.error"][info] +                elif info == "message": +                    error_message = error_body["odata.error"][info]["value"] +                else: +                    additional_data[info] = error_body["odata.error"][info] +        else: +            if error_body: +                for info in error_body.iter(): +                    if info.tag.lower().find("code") != -1: +                        error_code = info.text +                    elif info.tag.lower().find("message") != -1: +                        error_message = info.text +                    else: +                        additional_data[info.tag] = info.text +    except DecodeError: +        pass + +    try: +        if not error_type: +            error_code = TableErrorCode(error_code) +            if error_code in [ +                TableErrorCode.condition_not_met, +                TableErrorCode.update_condition_not_satisfied +            ]: +                error_type = ResourceModifiedError +            elif error_code in [ +                TableErrorCode.invalid_authentication_info, +                TableErrorCode.authentication_failed, +            ]: +                error_type = ClientAuthenticationError +            elif error_code in [ +                TableErrorCode.resource_not_found, +                TableErrorCode.table_not_found, +                TableErrorCode.entity_not_found, +                ResourceNotFoundError, +            ]: +                error_type = ResourceNotFoundError +            elif error_code in [ +                TableErrorCode.resource_already_exists, +                TableErrorCode.table_already_exists, +                TableErrorCode.account_already_exists, +                TableErrorCode.entity_already_exists, +                ResourceExistsError, +            ]: +                error_type = ResourceExistsError +            else: +                error_type = HttpResponseError +    except ValueError: +        # Got an unknown error code +        error_type = HttpResponseError + +    try: +        error_message += "\nErrorCode:{}".format(error_code.value) +    except AttributeError: +        error_message += "\nErrorCode:{}".format(error_code) +    for name, info in additional_data.items(): +        error_message += "\n{}:{}".format(name, info) + +    error = error_type(message=error_message, response=response, **kwargs) +    error.error_code = error_code +    error.additional_info = additional_data +    return error + + +def _reraise_error(decoded_error): +    _, _, exc_traceback = sys.exc_info() +    try: +        raise decoded_error.with_traceback(exc_traceback) +    except AttributeError: +        decoded_error.__traceback__ = exc_traceback +        raise decoded_error + + +def _process_table_error(storage_error): +    decoded_error = _decode_error(storage_error.response, storage_error.message) +    _reraise_error(decoded_error) + + +class TableTransactionError(HttpResponseError): +    """There is a failure in the transaction operations. + +    :ivar int index: If available, the index of the operation in the transaction that caused the error. +        Defaults to 0 in the case where an index was not provided, or the error applies across operations. +    :ivar ~azure.data.tables.TableErrorCode error_code: The error code. +    :ivar str message: The error message. +    :ivar additional_info: Any additional data for the error.
+
+
+class TableTransactionError(HttpResponseError):
+    """There is a failure in the transaction operations.
+
+    :ivar int index: If available, the index of the operation in the transaction that caused the error.
+     Defaults to 0 in the case where an index was not provided, or the error applies across operations.
+    :ivar ~azure.data.tables.TableErrorCode error_code: The error code.
+    :ivar str message: The error message.
+    :ivar additional_info: Any additional data for the error.
+    :vartype additional_info: Mapping[str, Any]
+    """
+
+    def __init__(self, **kwargs):
+        super(TableTransactionError, self).__init__(**kwargs)
+        self.index = kwargs.get('index', self._extract_index())
+
+    def _extract_index(self):
+        try:
+            message_sections = self.message.split(':', 1)
+            return int(message_sections[0])
+        except (AttributeError, ValueError):
+            # No message, or the message does not start with an operation index
+            return 0
+
+
+class RequestTooLargeError(TableTransactionError):
+    """An error response with status code 413 - Request Entity Too Large"""
+
+
+class TableErrorCode(str, Enum):
+    # Generic storage values
+    account_already_exists = "AccountAlreadyExists"
+    account_being_created = "AccountBeingCreated"
+    account_is_disabled = "AccountIsDisabled"
+    authentication_failed = "AuthenticationFailed"
+    authorization_failure = "AuthorizationFailure"
+    no_authentication_information = "NoAuthenticationInformation"
+    condition_headers_not_supported = "ConditionHeadersNotSupported"
+    condition_not_met = "ConditionNotMet"
+    empty_metadata_key = "EmptyMetadataKey"
+    insufficient_account_permissions = "InsufficientAccountPermissions"
+    internal_error = "InternalError"
+    invalid_authentication_info = "InvalidAuthenticationInfo"
+    invalid_header_value = "InvalidHeaderValue"
+    invalid_http_verb = "InvalidHttpVerb"
+    invalid_input = "InvalidInput"
+    invalid_md5 = "InvalidMd5"
+    invalid_metadata = "InvalidMetadata"
+    invalid_query_parameter_value = "InvalidQueryParameterValue"
+    invalid_range = "InvalidRange"
+    invalid_resource_name = "InvalidResourceName"
+    invalid_uri = "InvalidUri"
+    invalid_xml_document = "InvalidXmlDocument"
+    invalid_xml_node_value = "InvalidXmlNodeValue"
+    md5_mismatch = "Md5Mismatch"
+    metadata_too_large = "MetadataTooLarge"
+    missing_content_length_header = "MissingContentLengthHeader"
+    missing_required_query_parameter = "MissingRequiredQueryParameter"
+    missing_required_header = "MissingRequiredHeader"
+    missing_required_xml_node = "MissingRequiredXmlNode"
+    multiple_condition_headers_not_supported = "MultipleConditionHeadersNotSupported"
+    operation_timed_out = "OperationTimedOut"
+    out_of_range_input = "OutOfRangeInput"
+    out_of_range_query_parameter_value = "OutOfRangeQueryParameterValue"
+    request_body_too_large = "RequestBodyTooLarge"
+    resource_type_mismatch = "ResourceTypeMismatch"
+    request_url_failed_to_parse = "RequestUrlFailedToParse"
+    resource_already_exists = "ResourceAlreadyExists"
+    resource_not_found = "ResourceNotFound"
+    server_busy = "ServerBusy"
+    unsupported_header = "UnsupportedHeader"
+    unsupported_xml_node = "UnsupportedXmlNode"
+    unsupported_query_parameter = "UnsupportedQueryParameter"
+    unsupported_http_verb = "UnsupportedHttpVerb"
+
+    # table error codes
+    duplicate_properties_specified = "DuplicatePropertiesSpecified"
+    entity_not_found = "EntityNotFound"
+    entity_already_exists = "EntityAlreadyExists"
+    entity_too_large = "EntityTooLarge"
+    host_information_not_present = "HostInformationNotPresent"
+    invalid_duplicate_row = "InvalidDuplicateRow"
+    invalid_value_type = "InvalidValueType"
+    json_format_not_supported = "JsonFormatNotSupported"
+    method_not_allowed = "MethodNotAllowed"
+    not_implemented = "NotImplemented"
+    properties_need_value = "PropertiesNeedValue"
+    property_name_invalid = "PropertyNameInvalid"
+    property_name_too_long = "PropertyNameTooLong"
+    property_value_too_large = "PropertyValueTooLarge"
+    table_already_exists = "TableAlreadyExists"
+    table_being_deleted = "TableBeingDeleted"
+    table_not_found = "TableNotFound"
+    too_many_properties = "TooManyProperties"
+    update_condition_not_satisfied = "UpdateConditionNotSatisfied"
+    x_method_incorrect_count = "XMethodIncorrectCount"
+    x_method_incorrect_value = "XMethodIncorrectValue"
+    x_method_not_using_post = "XMethodNotUsingPost"
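Because `TableErrorCode` derives from `str`, its members compare equal to the raw strings the service returns, which makes post-hoc checks on a decoded error cheap. A hedged sketch (it assumes the vendored `TableClient.create_table` surface; `TableErrorCode` is defined in this module):

```python
# Hedged sketch: tolerate "table already exists" when provisioning the
# checkpoint table.
from azure.core.exceptions import ResourceExistsError

def _ensure_table(table_client):
    """Hypothetical helper; swallows only the benign already-exists error."""
    try:
        table_client.create_table()
    except ResourceExistsError as error:
        # _decode_error attaches error_code; members of the str-based enum
        # compare equal to wire strings such as "TableAlreadyExists".
        if error.error_code != TableErrorCode.table_already_exists:
            raise
```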
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/__init__.py
new file mode 100644
index 000000000000..62c47d0d219e
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_table import AzureTable
+from ._version import VERSION
+
+__version__ = VERSION
+__all__ = ['AzureTable']
+
+try:
+    from ._patch import patch_sdk  # type: ignore
+    patch_sdk()
+except ImportError:
+    pass
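With the generated package vendored, the higher-level `TableClient` ultimately drives an `AzureTable` pipeline client. A construction sketch (illustrative only: the account URL is a placeholder, and since `authentication_policy` defaults to `None`, real requests would be rejected until the vendored `TableClient` wires up credentials):

```python
# Illustrative sketch of constructing the generated sync client directly.
from azure.eventhub.extensions.checkpointstoretable._vendor.data.tables._generated import AzureTable

with AzureTable("https://myaccount.table.core.windows.net") as client:  # placeholder URL
    # The generated operation groups hang off the client; in this PR they are
    # normally reached through the vendored TableClient instead.
    table_ops = client.table
    service_ops = client.service
```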
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_azure_table.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_azure_table.py
new file mode 100644
index 000000000000..94fbac4da4c4
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_azure_table.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core import PipelineClient
+from msrest import Deserializer, Serializer
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any
+
+from ._configuration import AzureTableConfiguration
+from .operations import TableOperations
+from .operations import ServiceOperations
+from . import models
+
+
+class AzureTable(object):
+    """AzureTable.
+
+    :ivar table: TableOperations operations
+    :vartype table: azure.data.tables.operations.TableOperations
+    :ivar service: ServiceOperations operations
+    :vartype service: azure.data.tables.operations.ServiceOperations
+    :param url: The URL of the service account or table that is the target of the desired operation.
+    :type url: str
+    """
+
+    def __init__(
+        self,
+        url,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        base_url = '{url}'
+        self._config = AzureTableConfiguration(url, **kwargs)
+        self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._serialize.client_side_validation = False
+        self._deserialize = Deserializer(client_models)
+
+        self.table = TableOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.service = ServiceOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+
+    def close(self):
+        # type: () -> None
+        self._client.close()
+
+    def __enter__(self):
+        # type: () -> AzureTable
+        self._client.__enter__()
+        return self
+
+    def __exit__(self, *exc_details):
+        # type: (Any) -> None
+        self._client.__exit__(*exc_details)
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_configuration.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_configuration.py
new file mode 100644
index 000000000000..e7c7674b0940
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_configuration.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import TYPE_CHECKING
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+from ._version import VERSION
+
+if TYPE_CHECKING:
+    # pylint: disable=unused-import,ungrouped-imports
+    from typing import Any
+
+
+class AzureTableConfiguration(Configuration):
+    """Configuration for AzureTable.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param url: The URL of the service account or table that is the target of the desired operation.
+    :type url: str
+    """
+
+    def __init__(
+        self,
+        url,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        if url is None:
+            raise ValueError("Parameter 'url' must not be None.")
+        super(AzureTableConfiguration, self).__init__(**kwargs)
+
+        self.url = url
+        self.version = "2019-02-02"
+        kwargs.setdefault('sdk_moniker', 'data-tables/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_version.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_version.py
new file mode 100644
index 000000000000..0a99d31fccc0
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/_version.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+VERSION = "2019-02-02"
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/__init__.py
new file mode 100644
index 000000000000..5029783fe86b
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/__init__.py
@@ -0,0 +1,10 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._azure_table import AzureTable
+__all__ = ['AzureTable']
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/_azure_table.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/_azure_table.py
new file mode 100644
index 000000000000..1fc8affe42a6
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/_azure_table.py
@@ -0,0 +1,58 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any
+
+from azure.core import AsyncPipelineClient
+from msrest import Deserializer, Serializer
+
+from ._configuration import AzureTableConfiguration
+from .operations import TableOperations
+from .operations import ServiceOperations
+from .. import models
+
+
+class AzureTable(object):
+    """AzureTable.
+
+    :ivar table: TableOperations operations
+    :vartype table: azure.data.tables.aio.operations.TableOperations
+    :ivar service: ServiceOperations operations
+    :vartype service: azure.data.tables.aio.operations.ServiceOperations
+    :param url: The URL of the service account or table that is the target of the desired operation.
+    :type url: str
+    """
+
+    def __init__(
+        self,
+        url: str,
+        **kwargs: Any
+    ) -> None:
+        base_url = '{url}'
+        self._config = AzureTableConfiguration(url, **kwargs)
+        self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._serialize.client_side_validation = False
+        self._deserialize = Deserializer(client_models)
+
+        self.table = TableOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.service = ServiceOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+
+    async def close(self) -> None:
+        await self._client.close()
+
+    async def __aenter__(self) -> "AzureTable":
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details) -> None:
+        await self._client.__aexit__(*exc_details)
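The aio variant mirrors the sync client but must be closed asynchronously, so `async with` is the natural shape. A hedged lifecycle sketch (placeholder URL; no authentication policy wired, which the vendored async `TableClient` would normally supply):

```python
# Hedged sketch: lifecycle of the generated async client.
import asyncio
from azure.eventhub.extensions.checkpointstoretable._vendor.data.tables._generated.aio import AzureTable

async def main():
    async with AzureTable("https://myaccount.table.core.windows.net") as client:  # placeholder URL
        service_ops = client.service  # generated async operation group
        # e.g. await service_ops.get_properties() once credentials are configured

asyncio.run(main())
```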
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/_configuration.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/_configuration.py
new file mode 100644
index 000000000000..67bb9d47fb5c
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/_configuration.py
@@ -0,0 +1,53 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+from .._version import VERSION
+
+
+class AzureTableConfiguration(Configuration):
+    """Configuration for AzureTable.
+
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param url: The URL of the service account or table that is the target of the desired operation.
+    :type url: str
+    """
+
+    def __init__(
+        self,
+        url: str,
+        **kwargs: Any
+    ) -> None:
+        if url is None:
+            raise ValueError("Parameter 'url' must not be None.")
+        super(AzureTableConfiguration, self).__init__(**kwargs)
+
+        self.url = url
+        self.version = "2019-02-02"
+        kwargs.setdefault('sdk_moniker', 'data-tables/{}'.format(VERSION))
+        self._configure(**kwargs)
+
+    def _configure(
+        self,
+        **kwargs: Any
+    ) -> None:
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/__init__.py
new file mode 100644
index 000000000000..774e1c0d97a4
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/__init__.py
@@ -0,0 +1,15 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- + +from ._table_operations import TableOperations +from ._service_operations import ServiceOperations + +__all__ = [ + 'TableOperations', + 'ServiceOperations', +] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/_service_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/_service_operations.py new file mode 100644 index 000000000000..14715416451e --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/_service_operations.py @@ -0,0 +1,258 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class ServiceOperations: + """ServiceOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.data.tables.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def set_properties( + self, + table_service_properties: "_models.TableServiceProperties", + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs + ) -> None: + """Sets properties for an account's Table service endpoint, including properties for Analytics and + CORS (Cross-Origin Resource Sharing) rules. + + :param table_service_properties: The Table Service properties. + :type table_service_properties: ~azure.data.tables.models.TableServiceProperties + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "properties" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(table_service_properties, 'TableServiceProperties', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_properties.metadata = {'url': '/'} # type: ignore + + async def get_properties( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs + ) -> "_models.TableServiceProperties": + """Gets the properties of an account's Table service, including properties for Analytics and CORS + (Cross-Origin Resource Sharing) rules. + + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableServiceProperties, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + deserialized = self._deserialize('TableServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_properties.metadata = {'url': '/'} # type: ignore + + async def get_statistics( + self, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs + ) -> "_models.TableServiceStats": + """Retrieves statistics related to replication for the Table service. It is only available on the + secondary location endpoint when read-access geo-redundant replication is enabled for the + account. + + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableServiceStats, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableServiceStats + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableServiceStats"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "stats" + accept = "application/xml" + + # Construct URL + url = self.get_statistics.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('TableServiceStats', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_statistics.metadata = {'url': '/'} # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/_table_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/_table_operations.py new file mode 100644 index 000000000000..29f2273f788d --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/aio/operations/_table_operations.py @@ -0,0 +1,1067 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class TableOperations: + """TableOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.data.tables.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def query( + self, + request_id_parameter: Optional[str] = None, + next_table_name: Optional[str] = None, + query_options: Optional["_models.QueryOptions"] = None, + **kwargs + ) -> "_models.TableQueryResponse": + """Queries tables under the given account. + + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param next_table_name: A table query continuation token from a previous call. + :type next_table_name: str + :param query_options: Parameter group. 
+ :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableQueryResponse, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + _top = None + _select = None + _filter = None + if query_options is not None: + _format = query_options.format + _top = query_options.top + _select = query_options.select + _filter = query_options.filter + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.query.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + if _top is not None: + query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0) + if _select is not None: + query_parameters['$select'] = self._serialize.query("select", _select, 'str') + if _filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') + if next_table_name is not None: + query_parameters['NextTableName'] = self._serialize.query("next_table_name", next_table_name, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-continuation-NextTableName']=self._deserialize('str', response.headers.get('x-ms-continuation-NextTableName')) + deserialized = self._deserialize('TableQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + query.metadata = {'url': '/Tables'} # type: ignore + + async def create( + self, + table_properties: 
"_models.TableProperties", + request_id_parameter: Optional[str] = None, + response_preference: Optional[Union[str, "_models.ResponseFormat"]] = None, + query_options: Optional["_models.QueryOptions"] = None, + **kwargs + ) -> Optional["_models.TableResponse"]: + """Creates a new table under the given account. + + :param table_properties: The Table properties. + :type table_properties: ~azure.data.tables.models.TableProperties + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param response_preference: Specifies whether the response should include the inserted entity + in the payload. Possible values are return-no-content and return-content. + :type response_preference: str or ~azure.data.tables.models.ResponseFormat + :param query_options: Parameter group. + :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableResponse, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableResponse or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.TableResponse"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json;odata=nometadata") + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + if response_preference is not None: + header_parameters['Prefer'] = self._serialize.header("response_preference", response_preference, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(table_properties, 'TableProperties') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + deserialized = None + if response.status_code == 201: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied')) + deserialized = self._deserialize('TableResponse', pipeline_response) + + if response.status_code == 204: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied')) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create.metadata = {'url': '/Tables'} # type: ignore + + async def delete( + self, + table: str, + request_id_parameter: Optional[str] = None, + **kwargs + ) -> None: + """Operation permanently deletes the specified table. + + :param table: The name of the table. + :type table: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': '/Tables(\'{table}\')'} # type: ignore + + async def query_entities( + self, + table: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + next_partition_key: Optional[str] = None, + next_row_key: Optional[str] = None, + query_options: Optional["_models.QueryOptions"] = None, + **kwargs + ) -> "_models.TableEntityQueryResponse": + """Queries entities in a table. + + :param table: The name of the table. + :type table: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param next_partition_key: An entity query continuation token from a previous call. + :type next_partition_key: str + :param next_row_key: An entity query continuation token from a previous call. + :type next_row_key: str + :param query_options: Parameter group. 
+ :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableEntityQueryResponse, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableEntityQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableEntityQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + _top = None + _select = None + _filter = None + if query_options is not None: + _format = query_options.format + _top = query_options.top + _select = query_options.select + _filter = query_options.filter + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.query_entities.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + if _top is not None: + query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0) + if _select is not None: + query_parameters['$select'] = self._serialize.query("select", _select, 'str') + if _filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') + if next_partition_key is not None: + query_parameters['NextPartitionKey'] = self._serialize.query("next_partition_key", next_partition_key, 'str') + if next_row_key is not None: + query_parameters['NextRowKey'] = self._serialize.query("next_row_key", next_row_key, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + 
response_headers['x-ms-continuation-NextPartitionKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextPartitionKey')) + response_headers['x-ms-continuation-NextRowKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextRowKey')) + deserialized = self._deserialize('TableEntityQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + query_entities.metadata = {'url': '/{table}()'} # type: ignore + + async def query_entity_with_partition_and_row_key( + self, + table: str, + partition_key: str, + row_key: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + query_options: Optional["_models.QueryOptions"] = None, + **kwargs + ) -> Dict[str, object]: + """Queries a single entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param query_options: Parameter group. + :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: dict mapping str to object, or the result of cls(response) + :rtype: dict[str, object] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + _select = None + _filter = None + if query_options is not None: + _format = query_options.format + _select = query_options.select + _filter = query_options.filter + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.query_entity_with_partition_and_row_key.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'), + 'rowKey': self._serialize.url("row_key", row_key, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + if _select is not None: + query_parameters['$select'] = self._serialize.query("select", _select, 'str') + if _filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + 
header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-continuation-NextPartitionKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextPartitionKey')) + response_headers['x-ms-continuation-NextRowKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextRowKey')) + deserialized = self._deserialize('{object}', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + query_entity_with_partition_and_row_key.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore + + async def update_entity( + self, + table: str, + partition_key: str, + row_key: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + if_match: Optional[str] = None, + table_entity_properties: Optional[Dict[str, object]] = None, + query_options: Optional["_models.QueryOptions"] = None, + **kwargs + ) -> None: + """Update entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param if_match: Match condition for an entity to be updated. If specified and a matching + entity is not found, an error will be raised. To force an unconditional update, set to the + wildcard character (*). If not specified, an insert will be performed when no existing entity + is found to update and a replace will be performed if an existing entity is found. + :type if_match: str + :param table_entity_properties: The properties for the table entity. + :type table_entity_properties: dict[str, object] + :param query_options: Parameter group. 
+ :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update_entity.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'), + 'rowKey': self._serialize.url("row_key", row_key, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if table_entity_properties is not None: + body_content = self._serialize.body(table_entity_properties, '{object}') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + + if cls: + return cls(pipeline_response, None, response_headers) + + update_entity.metadata = {'url': 
'/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore + + async def merge_entity( + self, + table: str, + partition_key: str, + row_key: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + if_match: Optional[str] = None, + table_entity_properties: Optional[Dict[str, object]] = None, + query_options: Optional["_models.QueryOptions"] = None, + **kwargs + ) -> None: + """Merge entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param if_match: Match condition for an entity to be updated. If specified and a matching + entity is not found, an error will be raised. To force an unconditional update, set to the + wildcard character (*). If not specified, an insert will be performed when no existing entity + is found to update and a merge will be performed if an existing entity is found. + :type if_match: str + :param table_entity_properties: The properties for the table entity. + :type table_entity_properties: dict[str, object] + :param query_options: Parameter group. + :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.merge_entity.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'), + 'rowKey': self._serialize.url("row_key", row_key, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + 
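        # This operation issues an HTTP PATCH (note self._client.patch below), so only
+        # the supplied properties are merged into the stored entity, whereas
+        # update_entity (PUT) replaces it. An illustrative unconditional merge, with
+        # hypothetical names and values:
+        #
+        #     await table_ops.merge_entity(
+        #         table="checkpoints",
+        #         partition_key="mynamespace myeventhub mygroup Checkpoint",
+        #         row_key="0",
+        #         if_match="*",
+        #         table_entity_properties={"sequencenumber": 42},
+        #     )
+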
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if table_entity_properties is not None: + body_content = self._serialize.body(table_entity_properties, '{object}') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + + if cls: + return cls(pipeline_response, None, response_headers) + + merge_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore + + async def delete_entity( + self, + table: str, + partition_key: str, + row_key: str, + if_match: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + query_options: Optional["_models.QueryOptions"] = None, + **kwargs + ) -> None: + """Deletes the specified entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param if_match: Match condition for an entity to be deleted. If specified and a matching + entity is not found, an error will be raised. To force an unconditional delete, set to the + wildcard character (*). + :type if_match: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param query_options: Parameter group. 
+ :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.delete_entity.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'), + 'rowKey': self._serialize.url("row_key", row_key, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore + + async def insert_entity( + self, + table: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + response_preference: Optional[Union[str, "_models.ResponseFormat"]] = None, + table_entity_properties: Optional[Dict[str, object]] = None, + query_options: Optional["_models.QueryOptions"] = None, + **kwargs + ) -> Optional[Dict[str, object]]: + """Insert entity in a table. 
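+
+        The service replies 201 with the inserted entity in the body or 204 with no
+        body, depending on the ``Prefer`` header driven by ``response_preference``.
+        A minimal illustrative call, where ``table_ops`` is a hypothetical instance
+        of this operations class::
+
+            entity = await table_ops.insert_entity(
+                table="checkpoints",
+                table_entity_properties={"PartitionKey": "pk", "RowKey": "0"},
+                response_preference="return-content",
+            )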
+ + :param table: The name of the table. + :type table: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param response_preference: Specifies whether the response should include the inserted entity + in the payload. Possible values are return-no-content and return-content. + :type response_preference: str or ~azure.data.tables.models.ResponseFormat + :param table_entity_properties: The properties for the table entity. + :type table_entity_properties: dict[str, object] + :param query_options: Parameter group. + :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: dict mapping str to object, or the result of cls(response) + :rtype: dict[str, object] or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional[Dict[str, object]]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json;odata=nometadata") + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.insert_entity.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + if response_preference is not None: + header_parameters['Prefer'] = self._serialize.header("response_preference", response_preference, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if table_entity_properties is not None: + body_content = self._serialize.body(table_entity_properties, '{object}') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201, 204]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + deserialized = None + if response.status_code == 201: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + deserialized = self._deserialize('{object}', pipeline_response) + + if response.status_code == 204: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + insert_entity.metadata = {'url': '/{table}'} # type: ignore + + async def get_access_policy( + self, + table: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + **kwargs + ) -> List["_models.SignedIdentifier"]: + """Retrieves details about any stored access policies specified on the table that may be used with + Shared Access Signatures. + + :param table: The name of the table. + :type table: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of SignedIdentifier, or the result of cls(response) + :rtype: list[~azure.data.tables.models.SignedIdentifier] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "acl" + accept = "application/xml" + + # Construct URL + url = self.get_access_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_access_policy.metadata = {'url': '/{table}'} # type: ignore + + async def set_access_policy( + self, + table: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + table_acl: Optional[List["_models.SignedIdentifier"]] = None, + **kwargs + ) -> None: + """Sets stored access policies for the table that may be used with Shared Access Signatures. + + :param table: The name of the table. + :type table: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param table_acl: The acls for the table. 
+ :type table_acl: list[~azure.data.tables.models.SignedIdentifier] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "acl" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_access_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + if table_acl is not None: + body_content = self._serialize.body(table_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_access_policy.metadata = {'url': '/{table}'} # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/__init__.py new file mode 100644 index 000000000000..79fdccd13282 --- /dev/null +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/__init__.py @@ -0,0 +1,70 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +try: + from ._models_py3 import AccessPolicy + from ._models_py3 import CorsRule + from ._models_py3 import GeoReplication + from ._models_py3 import Logging + from ._models_py3 import Metrics + from ._models_py3 import QueryOptions + from ._models_py3 import RetentionPolicy + from ._models_py3 import SignedIdentifier + from ._models_py3 import TableEntityQueryResponse + from ._models_py3 import TableProperties + from ._models_py3 import TableQueryResponse + from ._models_py3 import TableResponse + from ._models_py3 import TableResponseProperties + from ._models_py3 import TableServiceError + from ._models_py3 import TableServiceProperties + from ._models_py3 import TableServiceStats +except (SyntaxError, ImportError): + from ._models import AccessPolicy # type: ignore + from ._models import CorsRule # type: ignore + from ._models import GeoReplication # type: ignore + from ._models import Logging # type: ignore + from ._models import Metrics # type: ignore + from ._models import QueryOptions # type: ignore + from ._models import RetentionPolicy # type: ignore + from ._models import SignedIdentifier # type: ignore + from ._models import TableEntityQueryResponse # type: ignore + from ._models import TableProperties # type: ignore + from ._models import TableQueryResponse # type: ignore + from ._models import TableResponse # type: ignore + from ._models import TableResponseProperties # type: ignore + from ._models import TableServiceError # type: ignore + from ._models import TableServiceProperties # type: ignore + from ._models import TableServiceStats # type: ignore + +from ._azure_table_enums import ( + GeoReplicationStatusType, + OdataMetadataFormat, + ResponseFormat, +) + +__all__ = [ + 'AccessPolicy', + 'CorsRule', + 'GeoReplication', + 'Logging', + 'Metrics', + 'QueryOptions', + 'RetentionPolicy', + 'SignedIdentifier', + 'TableEntityQueryResponse', + 'TableProperties', + 'TableQueryResponse', + 'TableResponse', + 'TableResponseProperties', + 'TableServiceError', + 'TableServiceProperties', + 'TableServiceStats', + 'GeoReplicationStatusType', + 'OdataMetadataFormat', + 'ResponseFormat', +] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_azure_table_enums.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_azure_table_enums.py new file mode 100644 index 000000000000..7685b428f163 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_azure_table_enums.py @@ -0,0 +1,46 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class GeoReplicationStatusType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The status of the secondary location. + """ + + LIVE = "live" + BOOTSTRAP = "bootstrap" + UNAVAILABLE = "unavailable" + +class OdataMetadataFormat(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + APPLICATION_JSON_ODATA_NOMETADATA = "application/json;odata=nometadata" + APPLICATION_JSON_ODATA_MINIMALMETADATA = "application/json;odata=minimalmetadata" + APPLICATION_JSON_ODATA_FULLMETADATA = "application/json;odata=fullmetadata" + +class ResponseFormat(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + + RETURN_NO_CONTENT = "return-no-content" + RETURN_CONTENT = "return-content" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_models.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_models.py new file mode 100644 index 000000000000..1138cbf8dc5a --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_models.py @@ -0,0 +1,540 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class AccessPolicy(msrest.serialization.Model): + """An Access policy. + + All required parameters must be populated in order to send to Azure. + + :param start: Required. The start datetime from which the policy is active. + :type start: str + :param expiry: Required. The datetime that the policy expires. + :type expiry: str + :param permission: Required. The permissions for the acl policy. 
+    :type permission: str
+    """
+
+    _validation = {
+        'start': {'required': True},
+        'expiry': {'required': True},
+        'permission': {'required': True},
+    }
+
+    _attribute_map = {
+        'start': {'key': 'Start', 'type': 'str', 'xml': {'name': 'Start'}},
+        'expiry': {'key': 'Expiry', 'type': 'str', 'xml': {'name': 'Expiry'}},
+        'permission': {'key': 'Permission', 'type': 'str', 'xml': {'name': 'Permission'}},
+    }
+    _xml_map = {
+        'name': 'AccessPolicy'
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(AccessPolicy, self).__init__(**kwargs)
+        self.start = kwargs['start']
+        self.expiry = kwargs['expiry']
+        self.permission = kwargs['permission']
+
+
+class CorsRule(msrest.serialization.Model):
+    """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param allowed_origins: Required. The origin domains that are permitted to make a request
+     against the service via CORS. The origin domain is the domain from which the request
+     originates. Note that the origin must be an exact case-sensitive match with the origin that
+     the user agent sends to the service. You can also use the wildcard character '*' to allow all
+     origin domains to make requests via CORS.
+    :type allowed_origins: str
+    :param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+     use for a CORS request (comma separated).
+    :type allowed_methods: str
+    :param allowed_headers: Required. The request headers that the origin domain may specify on
+     the CORS request.
+    :type allowed_headers: str
+    :param exposed_headers: Required. The response headers that may be sent in the response to the
+     CORS request and exposed by the browser to the request issuer.
+    :type exposed_headers: str
+    :param max_age_in_seconds: Required. The maximum amount of time that a browser should cache
+     the preflight OPTIONS request.
+    :type max_age_in_seconds: int
+    """
+
+    _validation = {
+        'allowed_origins': {'required': True},
+        'allowed_methods': {'required': True},
+        'allowed_headers': {'required': True},
+        'exposed_headers': {'required': True},
+        'max_age_in_seconds': {'required': True, 'minimum': 0},
+    }
+
+    _attribute_map = {
+        'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str', 'xml': {'name': 'AllowedOrigins'}},
+        'allowed_methods': {'key': 'AllowedMethods', 'type': 'str', 'xml': {'name': 'AllowedMethods'}},
+        'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str', 'xml': {'name': 'AllowedHeaders'}},
+        'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str', 'xml': {'name': 'ExposedHeaders'}},
+        'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int', 'xml': {'name': 'MaxAgeInSeconds'}},
+    }
+    _xml_map = {
+        'name': 'CorsRule'
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(CorsRule, self).__init__(**kwargs)
+        self.allowed_origins = kwargs['allowed_origins']
+        self.allowed_methods = kwargs['allowed_methods']
+        self.allowed_headers = kwargs['allowed_headers']
+        self.exposed_headers = kwargs['exposed_headers']
+        self.max_age_in_seconds = kwargs['max_age_in_seconds']
+
+
+class GeoReplication(msrest.serialization.Model):
+    """GeoReplication.
+
+    All required parameters must be populated in order to send to Azure.
+ + :param status: Required. The status of the secondary location. Possible values include: "live", + "bootstrap", "unavailable". + :type status: str or ~azure.data.tables.models.GeoReplicationStatusType + :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes + preceding this value are guaranteed to be available for read operations at the secondary. + Primary writes after this point in time may or may not be available for reads. + :type last_sync_time: ~datetime.datetime + """ + + _validation = { + 'status': {'required': True}, + 'last_sync_time': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'Status', 'type': 'str', 'xml': {'name': 'Status'}}, + 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123', 'xml': {'name': 'LastSyncTime'}}, + } + _xml_map = { + 'name': 'GeoReplication' + } + + def __init__( + self, + **kwargs + ): + super(GeoReplication, self).__init__(**kwargs) + self.status = kwargs['status'] + self.last_sync_time = kwargs['last_sync_time'] + + +class Logging(msrest.serialization.Model): + """Azure Analytics Logging settings. + + All required parameters must be populated in order to send to Azure. + + :param version: Required. The version of Analytics to configure. + :type version: str + :param delete: Required. Indicates whether all delete requests should be logged. + :type delete: bool + :param read: Required. Indicates whether all read requests should be logged. + :type read: bool + :param write: Required. Indicates whether all write requests should be logged. + :type write: bool + :param retention_policy: Required. The retention policy. + :type retention_policy: ~azure.data.tables.models.RetentionPolicy + """ + + _validation = { + 'version': {'required': True}, + 'delete': {'required': True}, + 'read': {'required': True}, + 'write': {'required': True}, + 'retention_policy': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}}, + 'delete': {'key': 'Delete', 'type': 'bool', 'xml': {'name': 'Delete'}}, + 'read': {'key': 'Read', 'type': 'bool', 'xml': {'name': 'Read'}}, + 'write': {'key': 'Write', 'type': 'bool', 'xml': {'name': 'Write'}}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + _xml_map = { + 'name': 'Logging' + } + + def __init__( + self, + **kwargs + ): + super(Logging, self).__init__(**kwargs) + self.version = kwargs['version'] + self.delete = kwargs['delete'] + self.read = kwargs['read'] + self.write = kwargs['write'] + self.retention_policy = kwargs['retention_policy'] + + +class Metrics(msrest.serialization.Model): + """Metrics. + + All required parameters must be populated in order to send to Azure. + + :param version: The version of Analytics to configure. + :type version: str + :param enabled: Required. Indicates whether metrics are enabled for the Table service. + :type enabled: bool + :param include_apis: Indicates whether metrics should generate summary statistics for called + API operations. + :type include_apis: bool + :param retention_policy: The retention policy. 
+ :type retention_policy: ~azure.data.tables.models.RetentionPolicy + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}}, + 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}}, + 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool', 'xml': {'name': 'IncludeAPIs'}}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + _xml_map = { + + } + + def __init__( + self, + **kwargs + ): + super(Metrics, self).__init__(**kwargs) + self.version = kwargs.get('version', None) + self.enabled = kwargs['enabled'] + self.include_apis = kwargs.get('include_apis', None) + self.retention_policy = kwargs.get('retention_policy', None) + + +class QueryOptions(msrest.serialization.Model): + """Parameter group. + + :param format: Specifies the media type for the response. Possible values include: + "application/json;odata=nometadata", "application/json;odata=minimalmetadata", + "application/json;odata=fullmetadata". + :type format: str or ~azure.data.tables.models.OdataMetadataFormat + :param top: Maximum number of records to return. + :type top: int + :param select: Select expression using OData notation. Limits the columns on each record to + just those requested, e.g. "$select=PolicyAssignmentId, ResourceId". + :type select: str + :param filter: OData filter expression. + :type filter: str + """ + + _validation = { + 'top': {'minimum': 0}, + } + + _attribute_map = { + 'format': {'key': 'Format', 'type': 'str'}, + 'top': {'key': 'Top', 'type': 'int'}, + 'select': {'key': 'Select', 'type': 'str'}, + 'filter': {'key': 'Filter', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(QueryOptions, self).__init__(**kwargs) + self.format = kwargs.get('format', None) + self.top = kwargs.get('top', None) + self.select = kwargs.get('select', None) + self.filter = kwargs.get('filter', None) + + +class RetentionPolicy(msrest.serialization.Model): + """The retention policy. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Indicates whether a retention policy is enabled for the service. + :type enabled: bool + :param days: Indicates the number of days that metrics or logging or soft-deleted data should + be retained. All data older than this value will be deleted. + :type days: int + """ + + _validation = { + 'enabled': {'required': True}, + 'days': {'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}}, + 'days': {'key': 'Days', 'type': 'int', 'xml': {'name': 'Days'}}, + } + _xml_map = { + 'name': 'RetentionPolicy' + } + + def __init__( + self, + **kwargs + ): + super(RetentionPolicy, self).__init__(**kwargs) + self.enabled = kwargs['enabled'] + self.days = kwargs.get('days', None) + + +class SignedIdentifier(msrest.serialization.Model): + """A signed identifier. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. A unique id. + :type id: str + :param access_policy: The access policy. 
+ :type access_policy: ~azure.data.tables.models.AccessPolicy + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'Id', 'type': 'str', 'xml': {'name': 'Id'}}, + 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'}, + } + _xml_map = { + 'name': 'SignedIdentifier' + } + + def __init__( + self, + **kwargs + ): + super(SignedIdentifier, self).__init__(**kwargs) + self.id = kwargs['id'] + self.access_policy = kwargs.get('access_policy', None) + + +class TableEntityQueryResponse(msrest.serialization.Model): + """The properties for the table entity query response. + + :param odata_metadata: The metadata response of the table. + :type odata_metadata: str + :param value: List of table entities. + :type value: list[dict[str, object]] + """ + + _attribute_map = { + 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[{object}]'}, + } + + def __init__( + self, + **kwargs + ): + super(TableEntityQueryResponse, self).__init__(**kwargs) + self.odata_metadata = kwargs.get('odata_metadata', None) + self.value = kwargs.get('value', None) + + +class TableProperties(msrest.serialization.Model): + """The properties for creating a table. + + :param table_name: The name of the table to create. + :type table_name: str + """ + + _attribute_map = { + 'table_name': {'key': 'TableName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TableProperties, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + + +class TableQueryResponse(msrest.serialization.Model): + """The properties for the table query response. + + :param odata_metadata: The metadata response of the table. + :type odata_metadata: str + :param value: List of tables. + :type value: list[~azure.data.tables.models.TableResponseProperties] + """ + + _attribute_map = { + 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[TableResponseProperties]'}, + } + + def __init__( + self, + **kwargs + ): + super(TableQueryResponse, self).__init__(**kwargs) + self.odata_metadata = kwargs.get('odata_metadata', None) + self.value = kwargs.get('value', None) + + +class TableResponseProperties(msrest.serialization.Model): + """The properties for the table response. + + :param table_name: The name of the table. + :type table_name: str + :param odata_type: The odata type of the table. + :type odata_type: str + :param odata_id: The id of the table. + :type odata_id: str + :param odata_edit_link: The edit link of the table. + :type odata_edit_link: str + """ + + _attribute_map = { + 'table_name': {'key': 'TableName', 'type': 'str'}, + 'odata_type': {'key': 'odata\\.type', 'type': 'str'}, + 'odata_id': {'key': 'odata\\.id', 'type': 'str'}, + 'odata_edit_link': {'key': 'odata\\.editLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TableResponseProperties, self).__init__(**kwargs) + self.table_name = kwargs.get('table_name', None) + self.odata_type = kwargs.get('odata_type', None) + self.odata_id = kwargs.get('odata_id', None) + self.odata_edit_link = kwargs.get('odata_edit_link', None) + + +class TableResponse(TableResponseProperties): + """The response for a single table. + + :param table_name: The name of the table. + :type table_name: str + :param odata_type: The odata type of the table. + :type odata_type: str + :param odata_id: The id of the table. + :type odata_id: str + :param odata_edit_link: The edit link of the table. 
+ :type odata_edit_link: str + :param odata_metadata: The metadata response of the table. + :type odata_metadata: str + """ + + _attribute_map = { + 'table_name': {'key': 'TableName', 'type': 'str'}, + 'odata_type': {'key': 'odata\\.type', 'type': 'str'}, + 'odata_id': {'key': 'odata\\.id', 'type': 'str'}, + 'odata_edit_link': {'key': 'odata\\.editLink', 'type': 'str'}, + 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TableResponse, self).__init__(**kwargs) + self.odata_metadata = kwargs.get('odata_metadata', None) + + +class TableServiceError(msrest.serialization.Model): + """Table Service error. + + :param message: The error message. + :type message: str + """ + + _attribute_map = { + 'message': {'key': 'Message', 'type': 'str', 'xml': {'name': 'Message'}}, + } + _xml_map = { + + } + + def __init__( + self, + **kwargs + ): + super(TableServiceError, self).__init__(**kwargs) + self.message = kwargs.get('message', None) + + +class TableServiceProperties(msrest.serialization.Model): + """Table Service Properties. + + :param logging: Azure Analytics Logging settings. + :type logging: ~azure.data.tables.models.Logging + :param hour_metrics: A summary of request statistics grouped by API in hourly aggregates for + tables. + :type hour_metrics: ~azure.data.tables.models.Metrics + :param minute_metrics: A summary of request statistics grouped by API in minute aggregates for + tables. + :type minute_metrics: ~azure.data.tables.models.Metrics + :param cors: The set of CORS rules. + :type cors: list[~azure.data.tables.models.CorsRule] + """ + + _attribute_map = { + 'logging': {'key': 'Logging', 'type': 'Logging'}, + 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, + 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, + 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'name': 'Cors', 'wrapped': True, 'itemsName': 'CorsRule'}}, + } + _xml_map = { + 'name': 'StorageServiceProperties' + } + + def __init__( + self, + **kwargs + ): + super(TableServiceProperties, self).__init__(**kwargs) + self.logging = kwargs.get('logging', None) + self.hour_metrics = kwargs.get('hour_metrics', None) + self.minute_metrics = kwargs.get('minute_metrics', None) + self.cors = kwargs.get('cors', None) + + +class TableServiceStats(msrest.serialization.Model): + """Stats for the service. + + :param geo_replication: Geo-Replication information for the Secondary Storage Service. + :type geo_replication: ~azure.data.tables.models.GeoReplication + """ + + _attribute_map = { + 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, + } + _xml_map = { + 'name': 'StorageServiceStats' + } + + def __init__( + self, + **kwargs + ): + super(TableServiceStats, self).__init__(**kwargs) + self.geo_replication = kwargs.get('geo_replication', None) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_models_py3.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_models_py3.py new file mode 100644 index 000000000000..13318e659b97 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/models/_models_py3.py @@ -0,0 +1,608 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+from ._azure_table_enums import *
+
+
+class AccessPolicy(msrest.serialization.Model):
+    """An Access policy.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param start: Required. The start datetime from which the policy is active.
+    :type start: str
+    :param expiry: Required. The datetime that the policy expires.
+    :type expiry: str
+    :param permission: Required. The permissions for the acl policy.
+    :type permission: str
+    """
+
+    _validation = {
+        'start': {'required': True},
+        'expiry': {'required': True},
+        'permission': {'required': True},
+    }
+
+    _attribute_map = {
+        'start': {'key': 'Start', 'type': 'str', 'xml': {'name': 'Start'}},
+        'expiry': {'key': 'Expiry', 'type': 'str', 'xml': {'name': 'Expiry'}},
+        'permission': {'key': 'Permission', 'type': 'str', 'xml': {'name': 'Permission'}},
+    }
+    _xml_map = {
+        'name': 'AccessPolicy'
+    }
+
+    def __init__(
+        self,
+        *,
+        start: str,
+        expiry: str,
+        permission: str,
+        **kwargs
+    ):
+        super(AccessPolicy, self).__init__(**kwargs)
+        self.start = start
+        self.expiry = expiry
+        self.permission = permission
+
+
+class CorsRule(msrest.serialization.Model):
+    """CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param allowed_origins: Required. The origin domains that are permitted to make a request
+     against the service via CORS. The origin domain is the domain from which the request
+     originates. Note that the origin must be an exact case-sensitive match with the origin that
+     the user agent sends to the service. You can also use the wildcard character '*' to allow all
+     origin domains to make requests via CORS.
+    :type allowed_origins: str
+    :param allowed_methods: Required. The methods (HTTP request verbs) that the origin domain may
+     use for a CORS request (comma separated).
+    :type allowed_methods: str
+    :param allowed_headers: Required. The request headers that the origin domain may specify on
+     the CORS request.
+    :type allowed_headers: str
+    :param exposed_headers: Required. The response headers that may be sent in the response to the
+     CORS request and exposed by the browser to the request issuer.
+    :type exposed_headers: str
+    :param max_age_in_seconds: Required. The maximum amount of time that a browser should cache
+     the preflight OPTIONS request.
+ :type max_age_in_seconds: int + """ + + _validation = { + 'allowed_origins': {'required': True}, + 'allowed_methods': {'required': True}, + 'allowed_headers': {'required': True}, + 'exposed_headers': {'required': True}, + 'max_age_in_seconds': {'required': True, 'minimum': 0}, + } + + _attribute_map = { + 'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str', 'xml': {'name': 'AllowedOrigins'}}, + 'allowed_methods': {'key': 'AllowedMethods', 'type': 'str', 'xml': {'name': 'AllowedMethods'}}, + 'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str', 'xml': {'name': 'AllowedHeaders'}}, + 'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str', 'xml': {'name': 'ExposedHeaders'}}, + 'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int', 'xml': {'name': 'MaxAgeInSeconds'}}, + } + _xml_map = { + 'name': 'CorsRule' + } + + def __init__( + self, + *, + allowed_origins: str, + allowed_methods: str, + allowed_headers: str, + exposed_headers: str, + max_age_in_seconds: int, + **kwargs + ): + super(CorsRule, self).__init__(**kwargs) + self.allowed_origins = allowed_origins + self.allowed_methods = allowed_methods + self.allowed_headers = allowed_headers + self.exposed_headers = exposed_headers + self.max_age_in_seconds = max_age_in_seconds + + +class GeoReplication(msrest.serialization.Model): + """GeoReplication. + + All required parameters must be populated in order to send to Azure. + + :param status: Required. The status of the secondary location. Possible values include: "live", + "bootstrap", "unavailable". + :type status: str or ~azure.data.tables.models.GeoReplicationStatusType + :param last_sync_time: Required. A GMT date/time value, to the second. All primary writes + preceding this value are guaranteed to be available for read operations at the secondary. + Primary writes after this point in time may or may not be available for reads. + :type last_sync_time: ~datetime.datetime + """ + + _validation = { + 'status': {'required': True}, + 'last_sync_time': {'required': True}, + } + + _attribute_map = { + 'status': {'key': 'Status', 'type': 'str', 'xml': {'name': 'Status'}}, + 'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123', 'xml': {'name': 'LastSyncTime'}}, + } + _xml_map = { + 'name': 'GeoReplication' + } + + def __init__( + self, + *, + status: Union[str, "GeoReplicationStatusType"], + last_sync_time: datetime.datetime, + **kwargs + ): + super(GeoReplication, self).__init__(**kwargs) + self.status = status + self.last_sync_time = last_sync_time + + +class Logging(msrest.serialization.Model): + """Azure Analytics Logging settings. + + All required parameters must be populated in order to send to Azure. + + :param version: Required. The version of Analytics to configure. + :type version: str + :param delete: Required. Indicates whether all delete requests should be logged. + :type delete: bool + :param read: Required. Indicates whether all read requests should be logged. + :type read: bool + :param write: Required. Indicates whether all write requests should be logged. + :type write: bool + :param retention_policy: Required. The retention policy. 
+ :type retention_policy: ~azure.data.tables.models.RetentionPolicy + """ + + _validation = { + 'version': {'required': True}, + 'delete': {'required': True}, + 'read': {'required': True}, + 'write': {'required': True}, + 'retention_policy': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}}, + 'delete': {'key': 'Delete', 'type': 'bool', 'xml': {'name': 'Delete'}}, + 'read': {'key': 'Read', 'type': 'bool', 'xml': {'name': 'Read'}}, + 'write': {'key': 'Write', 'type': 'bool', 'xml': {'name': 'Write'}}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + _xml_map = { + 'name': 'Logging' + } + + def __init__( + self, + *, + version: str, + delete: bool, + read: bool, + write: bool, + retention_policy: "RetentionPolicy", + **kwargs + ): + super(Logging, self).__init__(**kwargs) + self.version = version + self.delete = delete + self.read = read + self.write = write + self.retention_policy = retention_policy + + +class Metrics(msrest.serialization.Model): + """Metrics. + + All required parameters must be populated in order to send to Azure. + + :param version: The version of Analytics to configure. + :type version: str + :param enabled: Required. Indicates whether metrics are enabled for the Table service. + :type enabled: bool + :param include_apis: Indicates whether metrics should generate summary statistics for called + API operations. + :type include_apis: bool + :param retention_policy: The retention policy. + :type retention_policy: ~azure.data.tables.models.RetentionPolicy + """ + + _validation = { + 'enabled': {'required': True}, + } + + _attribute_map = { + 'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}}, + 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}}, + 'include_apis': {'key': 'IncludeAPIs', 'type': 'bool', 'xml': {'name': 'IncludeAPIs'}}, + 'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy'}, + } + _xml_map = { + + } + + def __init__( + self, + *, + enabled: bool, + version: Optional[str] = None, + include_apis: Optional[bool] = None, + retention_policy: Optional["RetentionPolicy"] = None, + **kwargs + ): + super(Metrics, self).__init__(**kwargs) + self.version = version + self.enabled = enabled + self.include_apis = include_apis + self.retention_policy = retention_policy + + +class QueryOptions(msrest.serialization.Model): + """Parameter group. + + :param format: Specifies the media type for the response. Possible values include: + "application/json;odata=nometadata", "application/json;odata=minimalmetadata", + "application/json;odata=fullmetadata". + :type format: str or ~azure.data.tables.models.OdataMetadataFormat + :param top: Maximum number of records to return. + :type top: int + :param select: Select expression using OData notation. Limits the columns on each record to + just those requested, e.g. "$select=PolicyAssignmentId, ResourceId". + :type select: str + :param filter: OData filter expression. 
+ :type filter: str + """ + + _validation = { + 'top': {'minimum': 0}, + } + + _attribute_map = { + 'format': {'key': 'Format', 'type': 'str'}, + 'top': {'key': 'Top', 'type': 'int'}, + 'select': {'key': 'Select', 'type': 'str'}, + 'filter': {'key': 'Filter', 'type': 'str'}, + } + + def __init__( + self, + *, + format: Optional[Union[str, "OdataMetadataFormat"]] = None, + top: Optional[int] = None, + select: Optional[str] = None, + filter: Optional[str] = None, + **kwargs + ): + super(QueryOptions, self).__init__(**kwargs) + self.format = format + self.top = top + self.select = select + self.filter = filter + + +class RetentionPolicy(msrest.serialization.Model): + """The retention policy. + + All required parameters must be populated in order to send to Azure. + + :param enabled: Required. Indicates whether a retention policy is enabled for the service. + :type enabled: bool + :param days: Indicates the number of days that metrics or logging or soft-deleted data should + be retained. All data older than this value will be deleted. + :type days: int + """ + + _validation = { + 'enabled': {'required': True}, + 'days': {'minimum': 1}, + } + + _attribute_map = { + 'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}}, + 'days': {'key': 'Days', 'type': 'int', 'xml': {'name': 'Days'}}, + } + _xml_map = { + 'name': 'RetentionPolicy' + } + + def __init__( + self, + *, + enabled: bool, + days: Optional[int] = None, + **kwargs + ): + super(RetentionPolicy, self).__init__(**kwargs) + self.enabled = enabled + self.days = days + + +class SignedIdentifier(msrest.serialization.Model): + """A signed identifier. + + All required parameters must be populated in order to send to Azure. + + :param id: Required. A unique id. + :type id: str + :param access_policy: The access policy. + :type access_policy: ~azure.data.tables.models.AccessPolicy + """ + + _validation = { + 'id': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'Id', 'type': 'str', 'xml': {'name': 'Id'}}, + 'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy'}, + } + _xml_map = { + 'name': 'SignedIdentifier' + } + + def __init__( + self, + *, + id: str, + access_policy: Optional["AccessPolicy"] = None, + **kwargs + ): + super(SignedIdentifier, self).__init__(**kwargs) + self.id = id + self.access_policy = access_policy + + +class TableEntityQueryResponse(msrest.serialization.Model): + """The properties for the table entity query response. + + :param odata_metadata: The metadata response of the table. + :type odata_metadata: str + :param value: List of table entities. + :type value: list[dict[str, object]] + """ + + _attribute_map = { + 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[{object}]'}, + } + + def __init__( + self, + *, + odata_metadata: Optional[str] = None, + value: Optional[List[Dict[str, object]]] = None, + **kwargs + ): + super(TableEntityQueryResponse, self).__init__(**kwargs) + self.odata_metadata = odata_metadata + self.value = value + + +class TableProperties(msrest.serialization.Model): + """The properties for creating a table. + + :param table_name: The name of the table to create. 
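+ For illustration, this is the payload consumed by the generated
+ ``TableOperations.create`` call (the ``client`` variable and table name are
+ hypothetical)::
+
+     client.table.create(TableProperties(table_name="checkpoints"))
+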
+ :type table_name: str + """ + + _attribute_map = { + 'table_name': {'key': 'TableName', 'type': 'str'}, + } + + def __init__( + self, + *, + table_name: Optional[str] = None, + **kwargs + ): + super(TableProperties, self).__init__(**kwargs) + self.table_name = table_name + + +class TableQueryResponse(msrest.serialization.Model): + """The properties for the table query response. + + :param odata_metadata: The metadata response of the table. + :type odata_metadata: str + :param value: List of tables. + :type value: list[~azure.data.tables.models.TableResponseProperties] + """ + + _attribute_map = { + 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[TableResponseProperties]'}, + } + + def __init__( + self, + *, + odata_metadata: Optional[str] = None, + value: Optional[List["TableResponseProperties"]] = None, + **kwargs + ): + super(TableQueryResponse, self).__init__(**kwargs) + self.odata_metadata = odata_metadata + self.value = value + + +class TableResponseProperties(msrest.serialization.Model): + """The properties for the table response. + + :param table_name: The name of the table. + :type table_name: str + :param odata_type: The odata type of the table. + :type odata_type: str + :param odata_id: The id of the table. + :type odata_id: str + :param odata_edit_link: The edit link of the table. + :type odata_edit_link: str + """ + + _attribute_map = { + 'table_name': {'key': 'TableName', 'type': 'str'}, + 'odata_type': {'key': 'odata\\.type', 'type': 'str'}, + 'odata_id': {'key': 'odata\\.id', 'type': 'str'}, + 'odata_edit_link': {'key': 'odata\\.editLink', 'type': 'str'}, + } + + def __init__( + self, + *, + table_name: Optional[str] = None, + odata_type: Optional[str] = None, + odata_id: Optional[str] = None, + odata_edit_link: Optional[str] = None, + **kwargs + ): + super(TableResponseProperties, self).__init__(**kwargs) + self.table_name = table_name + self.odata_type = odata_type + self.odata_id = odata_id + self.odata_edit_link = odata_edit_link + + +class TableResponse(TableResponseProperties): + """The response for a single table. + + :param table_name: The name of the table. + :type table_name: str + :param odata_type: The odata type of the table. + :type odata_type: str + :param odata_id: The id of the table. + :type odata_id: str + :param odata_edit_link: The edit link of the table. + :type odata_edit_link: str + :param odata_metadata: The metadata response of the table. + :type odata_metadata: str + """ + + _attribute_map = { + 'table_name': {'key': 'TableName', 'type': 'str'}, + 'odata_type': {'key': 'odata\\.type', 'type': 'str'}, + 'odata_id': {'key': 'odata\\.id', 'type': 'str'}, + 'odata_edit_link': {'key': 'odata\\.editLink', 'type': 'str'}, + 'odata_metadata': {'key': 'odata\\.metadata', 'type': 'str'}, + } + + def __init__( + self, + *, + table_name: Optional[str] = None, + odata_type: Optional[str] = None, + odata_id: Optional[str] = None, + odata_edit_link: Optional[str] = None, + odata_metadata: Optional[str] = None, + **kwargs + ): + super(TableResponse, self).__init__(table_name=table_name, odata_type=odata_type, odata_id=odata_id, odata_edit_link=odata_edit_link, **kwargs) + self.odata_metadata = odata_metadata + + +class TableServiceError(msrest.serialization.Model): + """Table Service error. + + :param message: The error message. 
+ :type message: str + """ + + _attribute_map = { + 'message': {'key': 'Message', 'type': 'str', 'xml': {'name': 'Message'}}, + } + _xml_map = { + + } + + def __init__( + self, + *, + message: Optional[str] = None, + **kwargs + ): + super(TableServiceError, self).__init__(**kwargs) + self.message = message + + +class TableServiceProperties(msrest.serialization.Model): + """Table Service Properties. + + :param logging: Azure Analytics Logging settings. + :type logging: ~azure.data.tables.models.Logging + :param hour_metrics: A summary of request statistics grouped by API in hourly aggregates for + tables. + :type hour_metrics: ~azure.data.tables.models.Metrics + :param minute_metrics: A summary of request statistics grouped by API in minute aggregates for + tables. + :type minute_metrics: ~azure.data.tables.models.Metrics + :param cors: The set of CORS rules. + :type cors: list[~azure.data.tables.models.CorsRule] + """ + + _attribute_map = { + 'logging': {'key': 'Logging', 'type': 'Logging'}, + 'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics'}, + 'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics'}, + 'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'name': 'Cors', 'wrapped': True, 'itemsName': 'CorsRule'}}, + } + _xml_map = { + 'name': 'StorageServiceProperties' + } + + def __init__( + self, + *, + logging: Optional["Logging"] = None, + hour_metrics: Optional["Metrics"] = None, + minute_metrics: Optional["Metrics"] = None, + cors: Optional[List["CorsRule"]] = None, + **kwargs + ): + super(TableServiceProperties, self).__init__(**kwargs) + self.logging = logging + self.hour_metrics = hour_metrics + self.minute_metrics = minute_metrics + self.cors = cors + + +class TableServiceStats(msrest.serialization.Model): + """Stats for the service. + + :param geo_replication: Geo-Replication information for the Secondary Storage Service. + :type geo_replication: ~azure.data.tables.models.GeoReplication + """ + + _attribute_map = { + 'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication'}, + } + _xml_map = { + 'name': 'StorageServiceStats' + } + + def __init__( + self, + *, + geo_replication: Optional["GeoReplication"] = None, + **kwargs + ): + super(TableServiceStats, self).__init__(**kwargs) + self.geo_replication = geo_replication diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/__init__.py new file mode 100644 index 000000000000..774e1c0d97a4 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/__init__.py @@ -0,0 +1,15 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
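+#
+# Usage sketch: these operation groups are not instantiated directly; the
+# generated client attaches them as attributes (attribute names as generated
+# for this package), e.g.:
+#
+#     stats = client.service.get_statistics()
+#     tables = client.table.query()
+#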
+# -------------------------------------------------------------------------- + +from ._table_operations import TableOperations +from ._service_operations import ServiceOperations + +__all__ = [ + 'TableOperations', + 'ServiceOperations', +] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/_service_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/_service_operations.py new file mode 100644 index 000000000000..a55e05345e87 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/_service_operations.py @@ -0,0 +1,265 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class ServiceOperations(object): + """ServiceOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.data.tables.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def set_properties( + self, + table_service_properties, # type: "_models.TableServiceProperties" + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Sets properties for an account's Table service endpoint, including properties for Analytics and + CORS (Cross-Origin Resource Sharing) rules. + + :param table_service_properties: The Table Service properties. + :type table_service_properties: ~azure.data.tables.models.TableServiceProperties + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "properties" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(table_service_properties, 'TableServiceProperties', is_xml=True) + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_properties.metadata = {'url': '/'} # type: ignore + + def get_properties( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.TableServiceProperties" + """Gets the properties of an account's Table service, including properties for Analytics and CORS + (Cross-Origin Resource Sharing) rules. + + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
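+ A sketch of reading the current settings (construction of ``client`` is
+ elided)::
+
+     props = client.service.get_properties()
+     cors_rules = props.cors or []
+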
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableServiceProperties, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableServiceProperties + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableServiceProperties"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "properties" + accept = "application/xml" + + # Construct URL + url = self.get_properties.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + deserialized = self._deserialize('TableServiceProperties', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_properties.metadata = {'url': '/'} # type: ignore + + def get_statistics( + self, + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.TableServiceStats" + """Retrieves statistics related to replication for the Table service. It is only available on the + secondary location endpoint when read-access geo-redundant replication is enabled for the + account. + + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
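+ Illustrative read of the replication state; as noted above, the request must
+ target the secondary endpoint of a read-access geo-redundant account::
+
+     stats = client.service.get_statistics()
+     status = stats.geo_replication.status  # "live", "bootstrap" or "unavailable"
+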
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableServiceStats, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableServiceStats + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableServiceStats"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + restype = "service" + comp = "stats" + accept = "application/xml" + + # Construct URL + url = self.get_statistics.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['restype'] = self._serialize.query("restype", restype, 'str') + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('TableServiceStats', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_statistics.metadata = {'url': '/'} # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/_table_operations.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/_table_operations.py new file mode 100644 index 000000000000..4fc9e308b463 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/operations/_table_operations.py @@ -0,0 +1,1082 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class TableOperations(object): + """TableOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.data.tables.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def query( + self, + request_id_parameter=None, # type: Optional[str] + next_table_name=None, # type: Optional[str] + query_options=None, # type: Optional["_models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.TableQueryResponse" + """Queries tables under the given account. + + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param next_table_name: A table query continuation token from a previous call. + :type next_table_name: str + :param query_options: Parameter group. 
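+ A first-page sketch; a follow-up page would pass the
+ ``x-ms-continuation-NextTableName`` response header back as
+ ``next_table_name`` (``client`` is assumed to be the generated client)::
+
+     result = client.table.query(query_options=QueryOptions(top=5))
+     names = [t.table_name for t in result.value]
+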
+ :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableQueryResponse, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + _top = None + _select = None + _filter = None + if query_options is not None: + _format = query_options.format + _top = query_options.top + _select = query_options.select + _filter = query_options.filter + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.query.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + if _top is not None: + query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0) + if _select is not None: + query_parameters['$select'] = self._serialize.query("select", _select, 'str') + if _filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') + if next_table_name is not None: + query_parameters['NextTableName'] = self._serialize.query("next_table_name", next_table_name, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['x-ms-continuation-NextTableName']=self._deserialize('str', response.headers.get('x-ms-continuation-NextTableName')) + deserialized = self._deserialize('TableQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + query.metadata = {'url': '/Tables'} # type: ignore + + def create( + self, + table_properties, # type: 
"_models.TableProperties" + request_id_parameter=None, # type: Optional[str] + response_preference=None, # type: Optional[Union[str, "_models.ResponseFormat"]] + query_options=None, # type: Optional["_models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) -> Optional["_models.TableResponse"] + """Creates a new table under the given account. + + :param table_properties: The Table properties. + :type table_properties: ~azure.data.tables.models.TableProperties + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param response_preference: Specifies whether the response should include the inserted entity + in the payload. Possible values are return-no-content and return-content. + :type response_preference: str or ~azure.data.tables.models.ResponseFormat + :param query_options: Parameter group. + :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableResponse, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableResponse or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.TableResponse"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json;odata=nometadata") + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + if response_preference is not None: + header_parameters['Prefer'] = self._serialize.header("response_preference", response_preference, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(table_properties, 'TableProperties') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = 
self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + deserialized = None + if response.status_code == 201: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied')) + deserialized = self._deserialize('TableResponse', pipeline_response) + + if response.status_code == 204: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied')) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + create.metadata = {'url': '/Tables'} # type: ignore + + def delete( + self, + table, # type: str + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + """Operation permanently deletes the specified table. + + :param table: The name of the table. + :type table: str + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete.metadata = {'url': '/Tables(\'{table}\')'} # type: ignore + + def query_entities( + self, + table, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + next_partition_key=None, # type: Optional[str] + next_row_key=None, # type: Optional[str] + query_options=None, # type: Optional["_models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) -> "_models.TableEntityQueryResponse" + """Queries entities in a table. + + :param table: The name of the table. + :type table: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param next_partition_key: An entity query continuation token from a previous call. + :type next_partition_key: str + :param next_row_key: An entity query continuation token from a previous call. + :type next_row_key: str + :param query_options: Parameter group. 
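+ An entity-paging sketch; the continuation values echo the
+ ``x-ms-continuation-NextPartitionKey`` / ``-NextRowKey`` response headers
+ (table name is hypothetical)::
+
+     result = client.table.query_entities("mytable", query_options=QueryOptions(top=100))
+     entities = result.value  # list of dicts
+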
+ :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TableEntityQueryResponse, or the result of cls(response) + :rtype: ~azure.data.tables.models.TableEntityQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.TableEntityQueryResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + _top = None + _select = None + _filter = None + if query_options is not None: + _format = query_options.format + _top = query_options.top + _select = query_options.select + _filter = query_options.filter + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.query_entities.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + if _top is not None: + query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0) + if _select is not None: + query_parameters['$select'] = self._serialize.query("select", _select, 'str') + if _filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') + if next_partition_key is not None: + query_parameters['NextPartitionKey'] = self._serialize.query("next_partition_key", next_partition_key, 'str') + if next_row_key is not None: + query_parameters['NextRowKey'] = self._serialize.query("next_row_key", next_row_key, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + 
response_headers['x-ms-continuation-NextPartitionKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextPartitionKey')) + response_headers['x-ms-continuation-NextRowKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextRowKey')) + deserialized = self._deserialize('TableEntityQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + query_entities.metadata = {'url': '/{table}()'} # type: ignore + + def query_entity_with_partition_and_row_key( + self, + table, # type: str + partition_key, # type: str + row_key, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + query_options=None, # type: Optional["_models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) -> Dict[str, object] + """Queries a single entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param query_options: Parameter group. + :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: dict mapping str to object, or the result of cls(response) + :rtype: dict[str, object] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + _select = None + _filter = None + if query_options is not None: + _format = query_options.format + _select = query_options.select + _filter = query_options.filter + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.query_entity_with_partition_and_row_key.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'), + 'rowKey': self._serialize.url("row_key", row_key, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + if _select is not None: + query_parameters['$select'] = self._serialize.query("select", _select, 'str') + if _filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = 
self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['x-ms-continuation-NextPartitionKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextPartitionKey')) + response_headers['x-ms-continuation-NextRowKey']=self._deserialize('str', response.headers.get('x-ms-continuation-NextRowKey')) + deserialized = self._deserialize('{object}', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + query_entity_with_partition_and_row_key.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore + + def update_entity( + self, + table, # type: str + partition_key, # type: str + row_key, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + if_match=None, # type: Optional[str] + table_entity_properties=None, # type: Optional[Dict[str, object]] + query_options=None, # type: Optional["_models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Update entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param if_match: Match condition for an entity to be updated. If specified and a matching + entity is not found, an error will be raised. To force an unconditional update, set to the + wildcard character (*). If not specified, an insert will be performed when no existing entity + is found to update and a replace will be performed if an existing entity is found. + :type if_match: str + :param table_entity_properties: The properties for the table entity. + :type table_entity_properties: dict[str, object] + :param query_options: Parameter group. 
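+ A concurrency sketch: pass the entity's current ETag to fail when another
+ writer has raced ahead, or ``"*"`` to replace unconditionally (all values
+ here are illustrative)::
+
+     client.table.update_entity(
+         "mytable", "pk-1", "rk-1",
+         if_match=current_etag,
+         table_entity_properties={"value": 42},
+     )
+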
+ :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update_entity.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'), + 'rowKey': self._serialize.url("row_key", row_key, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if table_entity_properties is not None: + body_content = self._serialize.body(table_entity_properties, '{object}') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + + if cls: + return cls(pipeline_response, None, response_headers) + + update_entity.metadata = {'url': 
'/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore + + def merge_entity( + self, + table, # type: str + partition_key, # type: str + row_key, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + if_match=None, # type: Optional[str] + table_entity_properties=None, # type: Optional[Dict[str, object]] + query_options=None, # type: Optional["_models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Merge entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param if_match: Match condition for an entity to be updated. If specified and a matching + entity is not found, an error will be raised. To force an unconditional update, set to the + wildcard character (*). If not specified, an insert will be performed when no existing entity + is found to update and a merge will be performed if an existing entity is found. + :type if_match: str + :param table_entity_properties: The properties for the table entity. + :type table_entity_properties: dict[str, object] + :param query_options: Parameter group. + :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.merge_entity.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'), + 'rowKey': self._serialize.url("row_key", row_key, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + if if_match is not None: + header_parameters['If-Match'] 
= self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if table_entity_properties is not None: + body_content = self._serialize.body(table_entity_properties, '{object}') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + + if cls: + return cls(pipeline_response, None, response_headers) + + merge_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore + + def delete_entity( + self, + table, # type: str + partition_key, # type: str + row_key, # type: str + if_match, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + query_options=None, # type: Optional["_models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the specified entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param if_match: Match condition for an entity to be deleted. If specified and a matching + entity is not found, an error will be raised. To force an unconditional delete, set to the + wildcard character (*). + :type if_match: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param query_options: Parameter group. 
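+ For illustration, an unconditional delete uses the wildcard match described
+ above (table and key values are hypothetical)::
+
+     client.table.delete_entity("mytable", "pk-1", "rk-1", if_match="*")
+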
+ :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.delete_entity.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + 'partitionKey': self._serialize.url("partition_key", partition_key, 'str'), + 'rowKey': self._serialize.url("row_key", row_key, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + delete_entity.metadata = {'url': '/{table}(PartitionKey=\'{partitionKey}\',RowKey=\'{rowKey}\')'} # type: ignore + + def insert_entity( + self, + table, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + response_preference=None, # type: Optional[Union[str, "_models.ResponseFormat"]] + table_entity_properties=None, # type: Optional[Dict[str, object]] + query_options=None, # type: Optional["_models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) 
-> Optional[Dict[str, object]] + """Insert entity in a table. + + :param table: The name of the table. + :type table: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param response_preference: Specifies whether the response should include the inserted entity + in the payload. Possible values are return-no-content and return-content. + :type response_preference: str or ~azure.data.tables.models.ResponseFormat + :param table_entity_properties: The properties for the table entity. + :type table_entity_properties: dict[str, object] + :param query_options: Parameter group. + :type query_options: ~azure.data.tables.models.QueryOptions + :keyword callable cls: A custom type or function that will be passed the direct response + :return: dict mapping str to object, or the result of cls(response) + :rtype: dict[str, object] or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[Optional[Dict[str, object]]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json;odata=nometadata") + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self.insert_entity.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + if _format is not None: + query_parameters['$format'] = self._serialize.query("format", _format, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['DataServiceVersion'] = self._serialize.header("data_service_version", data_service_version, 'str') + if response_preference is not None: + header_parameters['Prefer'] = self._serialize.header("response_preference", response_preference, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + if table_entity_properties is not None: + body_content = self._serialize.body(table_entity_properties, '{object}') + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [201, 204]: + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + deserialized = None + if response.status_code == 201: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + deserialized = self._deserialize('{object}', pipeline_response) + + if response.status_code == 204: + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + response_headers['ETag']=self._deserialize('str', response.headers.get('ETag')) + response_headers['Preference-Applied']=self._deserialize('str', response.headers.get('Preference-Applied')) + response_headers['Content-Type']=self._deserialize('str', response.headers.get('Content-Type')) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + insert_entity.metadata = {'url': '/{table}'} # type: ignore + + def get_access_policy( + self, + table, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> List["_models.SignedIdentifier"] + """Retrieves details about any stored access policies specified on the table that may be used with + Shared Access Signatures. + + :param table: The name of the table. + :type table: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. 
+ :type request_id_parameter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of SignedIdentifier, or the result of cls(response) + :rtype: list[~azure.data.tables.models.SignedIdentifier] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[List["_models.SignedIdentifier"]] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "acl" + accept = "application/xml" + + # Construct URL + url = self.get_access_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + deserialized = self._deserialize('[SignedIdentifier]', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, response_headers) + + return deserialized + get_access_policy.metadata = {'url': '/{table}'} # type: ignore + + def set_access_policy( + self, + table, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + table_acl=None, # type: Optional[List["_models.SignedIdentifier"]] + **kwargs # type: Any + ): + # type: (...) -> None + """Sets stored access policies for the table that may be used with Shared Access Signatures. + + :param table: The name of the table. + :type table: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param table_acl: The acls for the table. 
+ :type table_acl: list[~azure.data.tables.models.SignedIdentifier] + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + comp = "acl" + content_type = kwargs.pop("content_type", "application/xml") + accept = "application/xml" + + # Construct URL + url = self.set_access_policy.metadata['url'] # type: ignore + path_format_arguments = { + 'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True), + 'table': self._serialize.url("table", table, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0) + query_parameters['comp'] = self._serialize.query("comp", comp, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str') + if request_id_parameter is not None: + header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id_parameter", request_id_parameter, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + serialization_ctxt = {'xml': {'name': 'SignedIdentifiers', 'wrapped': True, 'itemsName': 'SignedIdentifier'}} + if table_acl is not None: + body_content = self._serialize.body(table_acl, '[SignedIdentifier]', is_xml=True, serialization_ctxt=serialization_ctxt) + else: + body_content = None + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize(_models.TableServiceError, response) + raise HttpResponseError(response=response, model=error) + + response_headers = {} + response_headers['x-ms-client-request-id']=self._deserialize('str', response.headers.get('x-ms-client-request-id')) + response_headers['x-ms-request-id']=self._deserialize('str', response.headers.get('x-ms-request-id')) + response_headers['x-ms-version']=self._deserialize('str', response.headers.get('x-ms-version')) + response_headers['Date']=self._deserialize('rfc-1123', response.headers.get('Date')) + + if cls: + return cls(pipeline_response, None, response_headers) + + set_access_policy.metadata = {'url': '/{table}'} # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/py.typed b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ 
b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_generated/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_models.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_models.py new file mode 100644 index 000000000000..bb39c03cdc72 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_models.py @@ -0,0 +1,696 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from enum import Enum +from typing import TYPE_CHECKING, Any, Dict, List + +from azure.core.exceptions import HttpResponseError +from azure.core.paging import PageIterator +# from azure.core import CaseInsensitiveEnumMeta +# from six import with_metaclass + +from ._generated.models import TableServiceStats as GenTableServiceStats +from ._generated.models import AccessPolicy as GenAccessPolicy +from ._generated.models import Logging as GeneratedLogging +from ._generated.models import Metrics as GeneratedMetrics +from ._generated.models import RetentionPolicy as GeneratedRetentionPolicy +from ._generated.models import CorsRule as GeneratedCorsRule +from ._generated.models import QueryOptions +from ._deserialize import ( + _convert_to_entity, + _return_context_and_deserialized, + _extract_continuation_token, +) +from ._error import _process_table_error +from ._constants import NEXT_PARTITION_KEY, NEXT_ROW_KEY, NEXT_TABLE_NAME + +if TYPE_CHECKING: + from ._generated.models import TableQueryResponse + from ._generated.models import TableServiceProperties as GenTableServiceProperties + + +class TableAccessPolicy(GenAccessPolicy): + """Access Policy class used by the set and get access policy methods. + + A stored access policy can specify the start time, expiry time, and + permissions for the Shared Access Signatures with which it's associated. + Depending on how you want to control access to your resource, you can + specify all of these parameters within the stored access policy, and omit + them from the URL for the Shared Access Signature. Doing so permits you to + modify the associated signature's behavior at any time, as well as to revoke + it. Or you can specify one or more of the access policy parameters within + the stored access policy, and the others on the URL. Finally, you can + specify all of the parameters on the URL. In this case, you can use the + stored access policy to revoke the signature, but not to modify its behavior. + + Together the Shared Access Signature and the stored access policy must + include all fields required to authenticate the signature. If any required + fields are missing, the request will fail. Likewise, if a field is specified + both in the Shared Access Signature URL and in the stored access policy, the + request will fail with status code 400 (Bad Request). + + :keyword str permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. 
+        Required unless an id is given referencing a stored access policy
+        which contains this field. This field must be omitted if it has been
+        specified in an associated stored access policy.
+    :keyword expiry:
+        The time at which the shared access signature becomes invalid.
+        Required unless an id is given referencing a stored access policy
+        which contains this field. This field must be omitted if it has
+        been specified in an associated stored access policy. Azure will always
+        convert values to UTC. If a date is passed in without timezone info, it
+        is assumed to be UTC.
+    :paramtype expiry: ~datetime.datetime or str
+    :keyword start:
+        The time at which the shared access signature becomes valid. If
+        omitted, start time for this call is assumed to be the time when the
+        storage service receives the request. Azure will always convert values
+        to UTC. If a date is passed in without timezone info, it is assumed to
+        be UTC.
+    :paramtype start: ~datetime.datetime or str
+    """
+    def __init__(self, **kwargs):  # pylint: disable=super-init-not-called
+        self.start = kwargs.get('start')
+        self.expiry = kwargs.get('expiry')
+        self.permission = kwargs.get('permission')
+
+    def __repr__(self):
+        # type: () -> str
+        return "TableAccessPolicy(start={}, expiry={}, permission={})".format(
+            self.start, self.expiry, self.permission
+        )[:1024]
+
+
+class TableAnalyticsLogging(GeneratedLogging):
+    """Azure Analytics Logging settings.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :keyword str version: Required. The version of Storage Analytics to configure.
+    :keyword bool delete: Required. Indicates whether all delete requests should be logged.
+    :keyword bool read: Required. Indicates whether all read requests should be logged.
+    :keyword bool write: Required. Indicates whether all write requests should be logged.
+    :keyword ~azure.data.tables.TableRetentionPolicy retention_policy: Required.
+        The retention policy for the metrics.
+    """
+
+    def __init__(self, **kwargs):  # pylint: disable=super-init-not-called
+        # type: (Any) -> None
+        self.version = kwargs.get("version", u"1.0")
+        self.delete = kwargs.get("delete", False)
+        self.read = kwargs.get("read", False)
+        self.write = kwargs.get("write", False)
+        self.retention_policy = kwargs.get("retention_policy") or TableRetentionPolicy()
+
+    @classmethod
+    def _from_generated(cls, generated):
+        if not generated:
+            return cls()
+        return cls(
+            version=generated.version,
+            delete=generated.delete,
+            read=generated.read,
+            write=generated.write,
+            retention_policy=TableRetentionPolicy._from_generated(  # pylint: disable=protected-access
+                generated.retention_policy
+            )
+        )
+
+    def __repr__(self):
+        # type: () -> str
+        return "TableAnalyticsLogging(version={}, delete={}, read={}, write={}, retention_policy={})".format(
+            self.version, self.delete, self.read, self.write, self.retention_policy
+        )[:1024]
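These settings objects nest a `TableRetentionPolicy`; a minimal usage sketch (the flag and day values here are illustrative, not taken from this change):

```python
# Illustrative values only; TableMetrics and TableRetentionPolicy are the
# classes defined in this same module.
logging_settings = TableAnalyticsLogging(
    read=True, write=True, delete=True,
    retention_policy=TableRetentionPolicy(enabled=True, days=5),
)
hour_metrics = TableMetrics(
    enabled=True, include_apis=True,
    retention_policy=TableRetentionPolicy(enabled=True, days=5),
)
```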
+ """ + + def __init__(self, **kwargs): # pylint: disable=super-init-not-called + # type: (Any) -> None + self.version = kwargs.get("version", u"1.0") + self.enabled = kwargs.get("enabled", False) + self.include_apis = kwargs.get("include_apis") + self.retention_policy = kwargs.get("retention_policy") or TableRetentionPolicy() + + @classmethod + def _from_generated(cls, generated): + # type: (...) -> TableMetrics + """A summary of request statistics grouped by API in hour or minute aggregates. + + :param TableMetrics generated: generated Metrics + """ + if not generated: + return cls() + return cls( + version=generated.version, + enabled=generated.enabled, + include_apis=generated.include_apis, + retention_policy=TableRetentionPolicy._from_generated( # pylint: disable=protected-access + generated.retention_policy + ) + ) + + def __repr__(self): + # type: () -> str + return "TableMetrics(version={}, enabled={}, include_apis={}, retention_policy={})".format( + self.version, self.enabled, self.include_apis, self.retention_policy + )[1024:] + + +class TableRetentionPolicy(GeneratedRetentionPolicy): + def __init__(self, **kwargs): # pylint: disable=super-init-not-called + # type: (Any) -> None + """The retention policy which determines how long the associated data should + persist. + + All required parameters must be populated in order to send to Azure. + + :keyword bool enabled: Required. Indicates whether a retention policy is enabled + for the storage service. Default value is False. + :keyword int days: Indicates the number of days that metrics or logging or + soft-deleted data should be retained. All data older than this value will + be deleted. Must be specified if policy is enabled. + """ + self.enabled = kwargs.get('enabled', False) + self.days = kwargs.get('days') + if self.enabled and (self.days is None): + raise ValueError("If policy is enabled, 'days' must be specified.") + + @classmethod + def _from_generated(cls, generated, **kwargs): # pylint: disable=unused-argument + # type: (GeneratedRetentionPolicy, Dict[str, Any]) -> TableRetentionPolicy + """The retention policy which determines how long the associated data should + persist. + + All required parameters must be populated in order to send to Azure. + + :param TableRetentionPolicy generated: Generated Retention Policy + """ + + if not generated: + return cls() + return cls( + enabled=generated.enabled, + days=generated.days, + ) + def __repr__(self): + # type: () -> str + return "TableRetentionPolicy(enabled={}, days={})".format(self.enabled, self.days)[1024:] + + +class TableCorsRule(object): + """CORS is an HTTP feature that enables a web application running under one + domain to access resources in another domain. Web browsers implement a + security restriction known as same-origin policy that prevents a web page + from calling APIs in a different domain; CORS provides a secure way to + allow one domain (the origin domain) to call APIs in another domain. + + All required parameters must be populated in order to send to Azure. + + :param list[str] allowed_origins: + A list of origin domains that will be allowed via CORS, or "*" to allow + all domains. The list of must contain at least one entry. Limited to 64 + origin domains. Each allowed origin can have up to 256 characters. + :param list[str] allowed_methods: + A list of HTTP methods that are allowed to be executed by the origin. + The list of must contain at least one entry. For Azure Storage, + permitted methods are DELETE, GET, HEAD, MERGE, POST, OPTIONS or PUT. 
+class TablePropertiesPaged(PageIterator):
+    """An iterable of Table properties.
+
+    :param callable command: Function to retrieve the next page of items.
+    :keyword int results_per_page: The maximum number of results retrieved per API call.
+    :keyword str filter: The filter to apply to results.
+    :keyword str continuation_token: An opaque continuation token.
+ """ + + def __init__(self, command, **kwargs): + super(TablePropertiesPaged, self).__init__( + self._get_next_cb, + self._extract_data_cb, + continuation_token=kwargs.get("continuation_token") or "", + ) + self._command = command + self._headers = None + self._response = None + self.results_per_page = kwargs.get("results_per_page") + self.filter = kwargs.get("filter") + self._location_mode = None + + def _get_next_cb(self, continuation_token, **kwargs): + query_options = QueryOptions(top=self.results_per_page, filter=self.filter) + try: + return self._command( + query_options=query_options, + next_table_name=continuation_token or None, + cls=kwargs.pop("cls", None) or _return_context_and_deserialized, + use_location=self._location_mode, + ) + except HttpResponseError as error: + _process_table_error(error) + + def _extract_data_cb(self, get_next_return): + self._location_mode, self._response, self._headers = get_next_return + props_list = [ + TableItem._from_generated(t, **self._headers) for t in self._response.value # pylint: disable=protected-access + ] + return self._headers[NEXT_TABLE_NAME] or None, props_list + + +class TableEntityPropertiesPaged(PageIterator): + """An iterable of TableEntity properties. + + :param callable command: Function to retrieve the next page of items. + :param str table: The name of the table. + :keyword int results_per_page: The maximum number of results retrieved per API call. + :keyword str filter: The filter to apply to results. + :keyword str select: The select filter to apply to results. + :keyword str continuation_token: An opaque continuation token. + """ + + def __init__(self, command, table, **kwargs): + super(TableEntityPropertiesPaged, self).__init__( + self._get_next_cb, + self._extract_data_cb, + continuation_token=kwargs.get("continuation_token") or {}, + ) + self._command = command + self._headers = None + self._response = None + self.table = table + self.results_per_page = kwargs.get("results_per_page") + self.filter = kwargs.get("filter") + self.select = kwargs.get("select") + self._location_mode = None + + def _get_next_cb(self, continuation_token, **kwargs): + next_partition_key, next_row_key = _extract_continuation_token( + continuation_token + ) + query_options = QueryOptions( + top=self.results_per_page, select=self.select, filter=self.filter + ) + try: + return self._command( + query_options=query_options, + next_row_key=next_row_key, + next_partition_key=next_partition_key, + table=self.table, + cls=kwargs.pop("cls", None) or _return_context_and_deserialized, + use_location=self._location_mode, + ) + except HttpResponseError as error: + _process_table_error(error) + + def _extract_data_cb(self, get_next_return): + self._location_mode, self._response, self._headers = get_next_return + props_list = [_convert_to_entity(t) for t in self._response.value] + next_entity = {} + if self._headers[NEXT_PARTITION_KEY] or self._headers[NEXT_ROW_KEY]: + next_entity = { + "PartitionKey": self._headers[NEXT_PARTITION_KEY], + "RowKey": self._headers[NEXT_ROW_KEY], + } + return next_entity or None, props_list + + +class TableSasPermissions(object): + def __init__(self, **kwargs): + # type: (Any) -> None + """ + :keyword bool read: + Get entities and query entities. + :keyword bool add: + Add entities. Add and Update permissions are required for upsert operations. + :keyword bool update: + Update entities. Add and Update permissions are required for upsert operations. + :keyword bool delete: + Delete entities. 
+ """ + _str = kwargs.pop('_str', "") or "" + self.read = kwargs.pop("read", False) or ("r" in _str) + self.add = kwargs.pop("add", False) or ("a" in _str) + self.update = kwargs.pop("update", False) or ("u" in _str) + self.delete = kwargs.pop("delete", False) or ("d" in _str) + + def __or__(self, other): + # type: (TableSasPermissions) -> TableSasPermissions + """ + :param other: + :type other: :class:`~azure.data.tables.TableSasPermissions` + """ + return TableSasPermissions(_str=str(self) + str(other)) + + def __add__(self, other): + # type: (TableSasPermissions) -> TableSasPermissions + """ + :param other: + :type other: :class:`~azure.data.tables.TableSasPermissions` + """ + return TableSasPermissions(_str=str(self) + str(other)) + + def __str__(self): + # type: () -> str + return ( + ("r" if self.read else "") + + ("a" if self.add else "") + + ("u" if self.update else "") + + ("d" if self.delete else "") + ) + + def __repr__(self): + # type: () -> str + return "TableSasPermissions(read={}, add={}, update={}, delete={})".format( + self.read, self.add, self.update, self.delete + )[1024:] + + @classmethod + def from_string( + cls, + permission, + **kwargs + ): + # Type: (str, Dict[str, Any]) -> AccountSasPermissions + """Create AccountSasPermissions from a string. + + To specify read, write, delete, etc. permissions you need only to + include the first letter of the word in the string. E.g. for read and write + permissions you would provide a string "rw". + + :param str permission: Specify permissions in + the string with the first letter of the word. + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An AccountSasPermissions object + :rtype: :class:`~azure.data.tables.AccountSasPermissions` + """ + p_read = "r" in permission + p_add = "a" in permission + p_delete = "d" in permission + p_update = "u" in permission + + parsed = cls( + **dict(kwargs, read=p_read, add=p_add, delete=p_delete, update=p_update) + ) + parsed._str = permission # pylint: disable=protected-access,attribute-defined-outside-init + return parsed + + +def service_stats_deserialize(generated): + # type: (GenTableServiceStats) -> Dict[str, Any] + """Deserialize a ServiceStats objects into a dict.""" + return { + "geo_replication": { + "status": generated.geo_replication.status, # type: ignore + "last_sync_time": generated.geo_replication.last_sync_time, # type: ignore + } + } + + +def service_properties_deserialize(generated): + # type: (GenTableServiceProperties) -> Dict[str, Any] + """Deserialize a ServiceProperties objects into a dict.""" + return { + "analytics_logging": TableAnalyticsLogging._from_generated(generated.logging), # pylint: disable=protected-access + "hour_metrics": TableMetrics._from_generated( # pylint: disable=protected-access + generated.hour_metrics + ), + "minute_metrics": TableMetrics._from_generated( # pylint: disable=protected-access + generated.minute_metrics + ), + "cors": [ + TableCorsRule._from_generated(cors) # pylint: disable=protected-access + for cors in generated.cors # type: ignore + ], + } + + +class TableItem(object): + """ + Represents an Azure TableItem. + Returned by TableServiceClient.list_tables and TableServiceClient.query_tables. + + :ivar str name: The name of the table. 
+ """ + + def __init__(self, name): + # type: (str) -> None + """ + :param str name: Name of the Table + """ + self.name = name + + # TODO: TableQueryResponse is not the correct type + @classmethod + def _from_generated(cls, generated, **kwargs): # pylint: disable=unused-argument + # type: (TableQueryResponse, Any) -> TableItem + return cls(generated.table_name) # type: ignore + + def __repr__(self): + # type: () -> str + return "TableItem(name={})".format(self.name)[1024:] + + +class TablePayloadFormat(object): + """ + Specifies the accepted content type of the response payload. More information + can be found here: https://msdn.microsoft.com/en-us/library/azure/dn535600.aspx + """ + + JSON_NO_METADATA = "application/json;odata=nometadata" + """Returns no type information for the entity properties.""" + + JSON_MINIMAL_METADATA = "application/json;odata=minimalmetadata" + """Returns minimal type information for the entity properties.""" + + JSON_FULL_METADATA = "application/json;odata=fullmetadata" + """Returns minimal type information for the entity properties plus some extra odata properties.""" + + +class UpdateMode(str, Enum): + REPLACE = "replace" + MERGE = "merge" + + +class TransactionOperation(str, Enum): + CREATE = "create" + UPSERT = "upsert" + UPDATE = "update" + DELETE = "delete" + + +class SASProtocol(str, Enum): + HTTPS = "https" + HTTP = "http" + + +class LocationMode(str, Enum): + """ + Specifies the location the request should be sent to. This mode only applies + for RA-GRS accounts which allow secondary read access. All other account types + must use PRIMARY. + """ + + PRIMARY = "primary" #: Requests should be sent to the primary location. + SECONDARY = ( + "secondary" #: Requests should be sent to the secondary location, if possible. + ) + + +class ResourceTypes(object): + """ + Specifies the resource types that are accessible with the account SAS. + + :keyword bool service: + Access to service-level APIs (e.g., Get/Set Service Properties, + Get Service Stats, List Tables) + :keyword bool object: + Access to object-level APIs for tables (e.g. Get/Create/Query Entity etc.) + """ + + def __init__(self, **kwargs): + # type: (Any) -> None + self.service = kwargs.get('service', False) + self.object = kwargs.get('object', False) + self._str = ("s" if self.service else "") + ("o" if self.object else "") + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, string): + # type: (str) -> ResourceTypes + """Create a ResourceTypes from a string. + + To specify service, container, or object you need only to + include the first letter of the word in the string. E.g. service and container, + you would provide a string "sc". + + :param str string: Specify service, container, or object in + in the string with the first letter of the word. + :return: A ResourceTypes object + :rtype: :class:`~azure.data.tables.ResourceTypes` + """ + res_service = "s" in string + res_object = "o" in string + + parsed = cls(service=res_service, object=res_object) + parsed._str = string # pylint: disable = protected-access + return parsed + + +class AccountSasPermissions(object): + """ + :class:`~AccountSasPermissions` class to be used with generate_account_sas + + :ivar bool read: + Valid for all signed resources types (Service, Container, and Object). + Permits read permissions to the specified resource type. + :ivar bool write: + Valid for all signed resources types (Service, Container, and Object). + Permits write permissions to the specified resource type. 
+ :ivar bool delete: + Valid for Container and Object resource types, except for queue messages. + :ivar bool list: + Valid for Service and Container resource types only. + :ivar bool add: + Valid for the following Object resource types only: queue messages, and append blobs. + :ivar bool create: + Valid for the following Object resource types only: blobs and files. + Users can create new blobs or files, but may not overwrite existing + blobs or files. + :ivar bool update: + Valid for the following Object resource types only: queue messages. + :ivar bool process: + Valid for the following Object resource type only: queue messages. + """ + + def __init__(self, **kwargs): + self.read = kwargs.pop("read", False) + self.write = kwargs.pop("write", False) + self.delete = kwargs.pop("delete", False) + self.list = kwargs.pop("list", False) + self.add = kwargs.pop("add", False) + self.create = kwargs.pop("create", False) + self.update = kwargs.pop("update", False) + self.process = kwargs.pop("process", False) + self._str = ( + ("r" if self.read else "") + + ("w" if self.write else "") + + ("d" if self.delete else "") + + ("l" if self.list else "") + + ("a" if self.add else "") + + ("c" if self.create else "") + + ("u" if self.update else "") + + ("p" if self.process else "") + ) + + def __str__(self): + return self._str + + @classmethod + def from_string(cls, permission, **kwargs): + # type: (str, Dict[str, Any]) -> AccountSasPermissions + """Create AccountSasPermissions from a string. + + To specify read, write, delete, etc. permissions you need only to + include the first letter of the word in the string. E.g. for read and write + permissions you would provide a string "rw". + + :param permission: Specify permissions in the string with the first letter of the word. + :type permission: str + :return: An AccountSasPermissions object + :rtype: :class:`~azure.data.tables.AccountSasPermissions` + """ + p_read = "r" in permission + p_write = "w" in permission + p_delete = "d" in permission + p_list = "l" in permission + p_add = "a" in permission + p_create = "c" in permission + p_update = "u" in permission + p_process = "p" in permission + + parsed = cls( + **dict( + kwargs, + read=p_read, + write=p_write, + delete=p_delete, + list=p_list, + add=p_add, + create=p_create, + update=p_update, + process=p_process, + ) + ) + parsed._str = permission # pylint: disable = protected-access + return parsed diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_policies.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_policies.py new file mode 100644 index 000000000000..ad5045703369 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_policies.py @@ -0,0 +1,241 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# -------------------------------------------------------------------------- + +import time +from typing import Any, TYPE_CHECKING, Dict +from wsgiref.handlers import format_date_time +try: + from urllib.parse import urlparse +except ImportError: + from urlparse import urlparse # type: ignore + +from azure.core.pipeline.policies import ( + HeadersPolicy, + SansIOHTTPPolicy, + RetryPolicy, +) +from azure.core.exceptions import AzureError, ServiceRequestError, ClientAuthenticationError + +from ._common_conversion import _transform_patch_to_cosmos_post +from ._models import LocationMode + +if TYPE_CHECKING: + from azure.core.pipeline import PipelineRequest + + +def set_next_host_location(settings, request): + # type: (Dict[str, Any], PipelineRequest) -> None + """ + A function which sets the next host location on the request, if applicable. + """ + if request.http_request.method not in ['GET', 'HEAD']: + return + try: + if settings["retry_secondary"] and settings["hosts"] and all(settings["hosts"].values()): + url = urlparse(request.http_request.url) + # If there's more than one possible location, retry to the alternative + if settings["mode"] == LocationMode.PRIMARY: + settings["mode"] = LocationMode.SECONDARY + else: + settings["mode"] = LocationMode.PRIMARY + updated = url._replace(netloc=settings["hosts"].get(settings["mode"])) + request.http_request.url = updated.geturl() + except KeyError: + pass + + +class StorageHeadersPolicy(HeadersPolicy): + + def on_request(self, request): + # type: (PipelineRequest) -> None + super(StorageHeadersPolicy, self).on_request(request) + + # Add required date headers + current_time = format_date_time(time.time()) + request.http_request.headers["x-ms-date"] = current_time + request.http_request.headers["Date"] = current_time + + +class StorageHosts(SansIOHTTPPolicy): + def __init__(self, hosts=None, **kwargs): # pylint: disable=unused-argument + self.hosts = hosts + super(StorageHosts, self).__init__() + + def on_request(self, request): + # type: (PipelineRequest) -> None + request.context.options["hosts"] = self.hosts + parsed_url = urlparse(request.http_request.url) + + # Detect what location mode we're currently requesting with + location_mode = LocationMode.PRIMARY + for key, value in self.hosts.items(): + if parsed_url.netloc == value: + location_mode = key + + # See if a specific location mode has been specified, and if so, redirect + use_location = request.context.options.pop("use_location", None) + if use_location: + # Lock retries to the specific location + request.context.options["retry_to_secondary"] = False + if use_location not in self.hosts: + raise ValueError( + "Attempting to use undefined host location {}".format(use_location) + ) + if use_location != location_mode: + # Update request URL to use the specified location + updated = parsed_url._replace(netloc=self.hosts[use_location]) + request.http_request.url = updated.geturl() + location_mode = use_location + + request.context.options["location_mode"] = location_mode + + +class TablesRetryPolicy(RetryPolicy): + """A retry policy. + + The retry policy in the pipeline can be configured directly, or tweaked on a per-call basis. + + :keyword bool retry_to_secondary: Whether to allow retrying to the secondary fail-over host + location. Default value is False. + + :keyword int retry_total: Total number of retries to allow. Takes precedence over other counts. + Default value is 10. + + :keyword int retry_connect: How many connection-related errors to retry on. 
+        These are errors raised before the request is sent to the remote server,
+        which we assume has not triggered the server to process the request. Default value is 3.
+
+    :keyword int retry_read: How many times to retry on read errors.
+        These errors are raised after the request was sent to the server, so the
+        request may have side-effects. Default value is 3.
+
+    :keyword int retry_status: How many times to retry on bad status codes. Default value is 3.
+
+    :keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try
+        (most errors are resolved immediately by a second try without a delay).
+        In fixed mode, retry policy will always sleep for {backoff factor}.
+        In 'exponential' mode, retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))`
+        seconds. If the backoff_factor is 0.1, then the retry will sleep
+        for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is 0.8.
+
+    :keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes).
+
+    :keyword RetryMode retry_mode: Fixed or exponential delay between attempts, default is exponential.
+
+    :keyword int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days).
+    """
+
+    def __init__(self, **kwargs):
+        super(TablesRetryPolicy, self).__init__(**kwargs)
+        self.retry_to_secondary = kwargs.get('retry_to_secondary', False)
+
+    def is_retry(self, settings, response):
+        """Is this method/status code retryable? (Based on whitelists and control
+        variables such as the number of total retries to allow, whether to
+        respect the Retry-After header, whether this header is present, and
+        whether the returned status code is on the list of status codes to
+        be retried upon in the presence of the aforementioned header)
+        """
+        should_retry = super(TablesRetryPolicy, self).is_retry(settings, response)
+        status = response.http_response.status_code
+        if status == 404 and settings['mode'] == LocationMode.SECONDARY:
+            # Response code 404 should be retried if secondary was used.
+            return True
+        return should_retry
+
+    def configure_retries(self, options):
+        """Configures the retry settings.
+
+        :param options: keyword arguments from context.
+        :return: A dict containing settings and history for retries.
+        :rtype: dict
+        """
+        config = super(TablesRetryPolicy, self).configure_retries(options)
+        config["retry_secondary"] = options.pop("retry_to_secondary", self.retry_to_secondary)
+        config["mode"] = options.pop("location_mode", LocationMode.PRIMARY)
+        config["hosts"] = options.pop("hosts", None)
+        return config
+
+    def update_context(self, context, retry_settings):
+        """Updates retry history in pipeline context.
+
+        :param context: The pipeline context.
+        :type context: ~azure.core.pipeline.PipelineContext
+        :param retry_settings: The retry settings.
+        :type retry_settings: dict
+        """
+        super(TablesRetryPolicy, self).update_context(context, retry_settings)
+        context['location_mode'] = retry_settings['mode']
+
+    def update_request(self, request, retry_settings):  # pylint:disable=no-self-use
+        """Updates the pipeline request before attempting to retry.
+
+        :param PipelineRequest request: The outgoing request.
+        :param dict(str, Any) retry_settings: The current retry context settings.
+        """
+        set_next_host_location(retry_settings, request)
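For reference, the delays implied by the documented exponential formula with the default backoff factor of 0.8 (the retry counts below are illustrative):

```python
backoff_factor = 0.8  # the documented default
delays = [backoff_factor * (2 ** (n - 1)) for n in range(1, 5)]
# -> [0.8, 1.6, 3.2, 6.4] seconds before retries 1-4, capped by retry_backoff_max
```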
+    def send(self, request):
+        """Sends the PipelineRequest object to the next policy. Uses retry settings if necessary.
+
+        :param request: The PipelineRequest object
+        :type request: ~azure.core.pipeline.PipelineRequest
+        :return: Returns the PipelineResponse or raises error if maximum retries exceeded.
+        :rtype: :class:`~azure.core.pipeline.PipelineResponse`
+        :raises: ~azure.core.exceptions.AzureError if maximum retries exceeded.
+        :raises: ~azure.core.exceptions.ClientAuthenticationError if authentication fails.
+        """
+        retry_active = True
+        response = None
+        retry_settings = self.configure_retries(request.context.options)
+        absolute_timeout = retry_settings['timeout']
+        is_response_error = True
+
+        while retry_active:
+            try:
+                start_time = time.time()
+                self._configure_timeout(request, absolute_timeout, is_response_error)
+                response = self.next.send(request)
+                if self.is_retry(retry_settings, response):
+                    retry_active = self.increment(retry_settings, response=response)
+                    if retry_active:
+                        self.update_request(request, retry_settings)
+                        self.sleep(retry_settings, request.context.transport, response=response)
+                        is_response_error = True
+                        continue
+                break
+            except ClientAuthenticationError:  # pylint:disable=try-except-raise
+                # the authentication policy failed such that the client's request can't
+                # succeed--we'll never have a response to it, so propagate the exception
+                raise
+            except AzureError as err:
+                if self._is_method_retryable(retry_settings, request.http_request):
+                    retry_active = self.increment(retry_settings, response=request, error=err)
+                    if retry_active:
+                        self.update_request(request, retry_settings)
+                        self.sleep(retry_settings, request.context.transport)
+                        if isinstance(err, ServiceRequestError):
+                            is_response_error = False
+                        else:
+                            is_response_error = True
+                        continue
+                raise err
+            finally:
+                end_time = time.time()
+                if absolute_timeout:
+                    absolute_timeout -= (end_time - start_time)
+
+        self.update_context(response.context, retry_settings)
+        return response
+
+
+class CosmosPatchTransformPolicy(SansIOHTTPPolicy):
+    """Policy to transform PATCH requests into POST requests with the "X-HTTP-Method":"MERGE" header set."""
+
+    def on_request(self, request):
+        # type: (PipelineRequest) -> None
+        if request.http_request.method == "PATCH":
+            _transform_patch_to_cosmos_post(request.http_request)
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_sdk_moniker.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_sdk_moniker.py
new file mode 100644
index 000000000000..8a7571636b37
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_sdk_moniker.py
@@ -0,0 +1,11 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._version import VERSION
+
+SDK_MONIKER = "data-tables/{}".format(VERSION)
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_serialize.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_serialize.py
new file mode 100644
index 000000000000..b7c71952d08f
--- /dev/null
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_serialize.py
@@ -0,0 +1,255 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# --------------------------------------------------------------------------
+from binascii import hexlify
+from typing import Dict
+from uuid import UUID
+from datetime import datetime
+from math import isnan
+from enum import Enum
+import sys
+
+import six
+from azure.core import MatchConditions
+from azure.core.exceptions import raise_with_traceback
+
+from ._entity import EdmType
+from ._common_conversion import _encode_base64, _to_utc_datetime
+from ._error import _ERROR_VALUE_TOO_LARGE, _ERROR_TYPE_NOT_SUPPORTED
+
+
+def _get_match_headers(etag, match_condition):
+    if match_condition == MatchConditions.IfNotModified:
+        if not etag:
+            raise ValueError("IfNotModified must be specified with etag.")
+        return etag
+    if match_condition == MatchConditions.Unconditionally:
+        if etag:
+            raise ValueError("Etag is not supported for an Unconditional operation.")
+        return "*"
+    raise ValueError("Unsupported match condition: {}".format(match_condition))
+
+
+def _parameter_filter_substitution(parameters, query_filter):
+    # type: (Dict[str, str], str) -> str
+    """Replace user-defined parameters in the filter.
+
+    :param parameters: User defined parameters
+    :param str query_filter: Filter for querying
+    """
+    if parameters:
+        filter_strings = query_filter.split(' ')
+        for index, word in enumerate(filter_strings):
+            if word[0] == u'@':
+                val = parameters[word[1:]]
+                if val in [True, False]:
+                    filter_strings[index] = str(val).lower()
+                elif isinstance(val, float):
+                    filter_strings[index] = str(val)
+                elif isinstance(val, six.integer_types):
+                    if val.bit_length() <= 32:
+                        filter_strings[index] = str(val)
+                    else:
+                        filter_strings[index] = "{}L".format(str(val))
+                elif isinstance(val, datetime):
+                    filter_strings[index] = "datetime'{}'".format(_to_utc_datetime(val))
+                elif isinstance(val, UUID):
+                    filter_strings[index] = "guid'{}'".format(str(val))
+                elif isinstance(val, six.binary_type):
+                    v = str(hexlify(val))
+                    if v[0] == 'b':  # Python 3 adds a 'b' and quotations, python 2.7 does neither
+                        v = v[2:-1]
+                    filter_strings[index] = "X'{}'".format(v)
+                else:
+                    filter_strings[index] = "'{}'".format(val.replace("'", "''"))
+        return ' '.join(filter_strings)
+    return query_filter
+
+
+def _to_entity_binary(value):
+    return EdmType.BINARY, _encode_base64(value)
+
+
+def _to_entity_bool(value):
+    return None, value
+
+
+def _to_entity_datetime(value):
+    if isinstance(value, str):
+        # Pass a serialized datetime straight through
+        return EdmType.DATETIME, value
+    try:
+        # Check if this is a 'round-trip' datetime, and if so
+        # pass through the original value.
+ if value.tables_service_value: + return EdmType.DATETIME, value.tables_service_value + except AttributeError: + pass + return EdmType.DATETIME, _to_utc_datetime(value) + + +def _to_entity_float(value): + if isnan(value): + return EdmType.DOUBLE, "NaN" + if value == float("inf"): + return EdmType.DOUBLE, "Infinity" + if value == float("-inf"): + return EdmType.DOUBLE, "-Infinity" + return EdmType.DOUBLE, value + + +def _to_entity_guid(value): + return EdmType.GUID, str(value) + + +def _to_entity_int32(value): + value = int(value) + if value >= 2 ** 31 or value < -(2 ** 31): + raise TypeError(_ERROR_VALUE_TOO_LARGE.format(str(value), EdmType.INT32)) + return None, value + + +def _to_entity_int64(value): + if sys.version_info < (3,): + ivalue = int(value) + else: + ivalue = int(value) + if ivalue >= 2 ** 63 or ivalue < -(2 ** 63): + raise TypeError(_ERROR_VALUE_TOO_LARGE.format(str(value), EdmType.INT64)) + return EdmType.INT64, str(value) + + +def _to_entity_str(value): + return EdmType.STRING, value + + +def _to_entity_none(value): # pylint: disable=unused-argument + return None, None + + +# Conversion from Python type to a function which returns a tuple of the +# type string and content string. +_PYTHON_TO_ENTITY_CONVERSIONS = { + int: _to_entity_int32, + bool: _to_entity_bool, + datetime: _to_entity_datetime, + float: _to_entity_float, + UUID: _to_entity_guid, + Enum: _to_entity_str, +} +try: + _PYTHON_TO_ENTITY_CONVERSIONS.update( + { + unicode: _to_entity_str, # type: ignore + str: _to_entity_binary, + long: _to_entity_int32, # type: ignore + } + ) +except NameError: + _PYTHON_TO_ENTITY_CONVERSIONS.update( + { + str: _to_entity_str, + bytes: _to_entity_binary, + } + ) + +# Conversion from Edm type to a function which returns a tuple of the +# type string and content string. +_EDM_TO_ENTITY_CONVERSIONS = { + EdmType.BINARY: _to_entity_binary, + EdmType.BOOLEAN: _to_entity_bool, + EdmType.DATETIME: _to_entity_datetime, + EdmType.DOUBLE: _to_entity_float, + EdmType.GUID: _to_entity_guid, + EdmType.INT32: _to_entity_int32, + EdmType.INT64: _to_entity_int64, + EdmType.STRING: _to_entity_str, +} + + +def _add_entity_properties(source): + """Converts an entity object to json to send. + The entity format is: + { + "Address":"Mountain View", + "Age":23, + "AmountDue":200.23, + "CustomerCode@odata.type":"Edm.Guid", + "CustomerCode":"c9da6455-213d-42c9-9a79-3e9149a57833", + "CustomerSince@odata.type":"Edm.DateTime", + "CustomerSince":"2008-07-10T00:00:00", + "IsActive":true, + "NumberOfOrders@odata.type":"Edm.Int64", + "NumberOfOrders":"255", + "PartitionKey":"mypartitionkey", + "RowKey":"myrowkey" + } + """ + + properties = {} + + to_send = dict(source) # shallow copy + + # set properties type for types we know if value has no type info. 
+ # if value has type info, then set the type to value.type + for name, value in to_send.items(): + mtype = "" + + if isinstance(value, Enum): + try: + conv = _PYTHON_TO_ENTITY_CONVERSIONS.get(unicode) # type: ignore + except NameError: + conv = _PYTHON_TO_ENTITY_CONVERSIONS.get(str) + mtype, value = conv(value) + elif isinstance(value, datetime): + mtype, value = _to_entity_datetime(value) + elif isinstance(value, tuple): + conv = _EDM_TO_ENTITY_CONVERSIONS.get(value[1]) + mtype, value = conv(value[0]) + else: + conv = _PYTHON_TO_ENTITY_CONVERSIONS.get(type(value)) + if conv is None and value is not None: + raise TypeError(_ERROR_TYPE_NOT_SUPPORTED.format(type(value))) + if value is None: + conv = _to_entity_none + + mtype, value = conv(value) + + # form the property node + if value is not None: + properties[name] = value + if mtype: + properties[name + "@odata.type"] = mtype.value + + # generate the entity_body + return properties + + +def serialize_iso(attr): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises ValueError: If format is invalid. + """ + if not attr: + return None + if isinstance(attr, str): + # Pass a string through unaltered + return attr + try: + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(ValueError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_shared_access_signature.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_shared_access_signature.py new file mode 100644 index 000000000000..e80c725156d0 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_shared_access_signature.py @@ -0,0 +1,283 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from datetime import date +from typing import Optional, Union, TYPE_CHECKING + +from ._deserialize import url_quote + + +from ._common_conversion import ( + _sign_string, + _to_str, +) +from ._constants import DEFAULT_X_MS_VERSION + +if TYPE_CHECKING: + from datetime import datetime # pylint: disable=ungrouped-imports + from ._models import AccountSasPermissions, ResourceTypes, SASProtocol + + +def _to_utc_datetime(value): + # This is for SAS where milliseconds are not supported + return value.strftime("%Y-%m-%dT%H:%M:%SZ") + + +class SharedAccessSignature(object): + """ + Provides a factory for creating account access + signature tokens with an account name and account key. Users can either + use the factory or can construct the appropriate service and use the + generate_*_shared_access_signature method directly. 
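+
+    A minimal usage sketch (illustrative only; assumes ``credential`` is an
+    ``AzureNamedKeyCredential`` for the target account and that
+    ``resource_types`` and ``expiry`` have been built beforehand)::
+
+        sas = SharedAccessSignature(credential)
+        token = sas.generate_account(
+            services="t",
+            resource_types=resource_types,
+            permission="r",
+            expiry=expiry,
+        )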
+ """ + + def __init__(self, credential, x_ms_version=DEFAULT_X_MS_VERSION): + """ + :param credential: The credential used for authenticating requests + :type credential: :class:`~azure.core.credentials.NamedKeyCredential` + :param str x_ms_version: + The service version used to generate the shared access signatures. + """ + self.account_name = credential.named_key.name + self.account_key = credential.named_key.key + self.x_ms_version = x_ms_version + + def generate_account( + self, + services, # type: str + resource_types, # type: ResourceTypes + permission, # type: Union[AccountSasPermissions, str] + expiry, # type: Union[datetime, str] + start=None, # type: Optional[Union[datetime, str]] + ip_address_or_range=None, # type: Optional[str] + protocol=None, # type: Optional[Union[str, SASProtocol]] + ): + # type: (...) -> str + """ + Generates a shared access signature for the account. + Use the returned signature with the sas_token parameter of the service + or to create a new account object. + + :param str services: + Specifies the services as a bitmap accessible with the account SAS. You can + combine values to provide access to more than one service. + :param ResourceTypes resource_types: + Specifies the resource types that are accessible with the account + SAS. You can combine values to provide access to more than one + resource type. + :param AccountSasPermissions permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. You can combine + values to provide more than one permission. + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: datetime or str + :param str ip_address_or_range: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :param Union[str, SASProtocol] protocol: + Specifies the protocol permitted for a request made. The default value + is https,http. See :class:`~azure.cosmosdb.table.common.models.Protocol` for possible values. 
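+        :return: A shared access signature in the form of a URL query string.
+        :rtype: str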
+ """ + sas = _SharedAccessHelper() + sas.add_base( + permission, expiry, start, ip_address_or_range, protocol, self.x_ms_version + ) + sas.add_account(services, resource_types) + sas.add_account_signature(self.account_name, self.account_key) + + return sas.get_token() + + +class QueryStringConstants(object): + SIGNED_SIGNATURE = "sig" + SIGNED_PERMISSION = "sp" + SIGNED_START = "st" + SIGNED_EXPIRY = "se" + SIGNED_RESOURCE = "sr" + SIGNED_IDENTIFIER = "si" + SIGNED_IP = "sip" + SIGNED_PROTOCOL = "spr" + SIGNED_VERSION = "sv" + SIGNED_CACHE_CONTROL = "rscc" + SIGNED_CONTENT_DISPOSITION = "rscd" + SIGNED_CONTENT_ENCODING = "rsce" + SIGNED_CONTENT_LANGUAGE = "rscl" + SIGNED_CONTENT_TYPE = "rsct" + START_PK = "spk" + START_RK = "srk" + END_PK = "epk" + END_RK = "erk" + SIGNED_RESOURCE_TYPES = "srt" + SIGNED_SERVICES = "ss" + TABLE_NAME = "tn" + + @staticmethod + def to_list(): + return [ + QueryStringConstants.SIGNED_SIGNATURE, + QueryStringConstants.SIGNED_PERMISSION, + QueryStringConstants.SIGNED_START, + QueryStringConstants.SIGNED_EXPIRY, + QueryStringConstants.SIGNED_RESOURCE, + QueryStringConstants.SIGNED_IDENTIFIER, + QueryStringConstants.SIGNED_IP, + QueryStringConstants.SIGNED_PROTOCOL, + QueryStringConstants.SIGNED_VERSION, + QueryStringConstants.SIGNED_CACHE_CONTROL, + QueryStringConstants.SIGNED_CONTENT_DISPOSITION, + QueryStringConstants.SIGNED_CONTENT_ENCODING, + QueryStringConstants.SIGNED_CONTENT_LANGUAGE, + QueryStringConstants.SIGNED_CONTENT_TYPE, + QueryStringConstants.START_PK, + QueryStringConstants.START_RK, + QueryStringConstants.END_PK, + QueryStringConstants.END_RK, + QueryStringConstants.SIGNED_RESOURCE_TYPES, + QueryStringConstants.SIGNED_SERVICES, + QueryStringConstants.TABLE_NAME, + ] + + +class _SharedAccessHelper(object): + def __init__(self): + self.query_dict = {} + + def _add_query(self, name, val): + if val: + self.query_dict[name] = _to_str(val) + + def add_base(self, permission, expiry, start, ip, protocol, x_ms_version): + if isinstance(start, date): + start = _to_utc_datetime(start) + + if isinstance(expiry, date): + expiry = _to_utc_datetime(expiry) + + self._add_query(QueryStringConstants.SIGNED_START, start) + self._add_query(QueryStringConstants.SIGNED_EXPIRY, expiry) + self._add_query(QueryStringConstants.SIGNED_PERMISSION, permission) + self._add_query(QueryStringConstants.SIGNED_IP, ip) + self._add_query(QueryStringConstants.SIGNED_PROTOCOL, protocol) + self._add_query(QueryStringConstants.SIGNED_VERSION, x_ms_version) + + def add_resource(self, resource): + self._add_query(QueryStringConstants.SIGNED_RESOURCE, resource) + + def add_id(self, id): # pylint: disable=redefined-builtin + self._add_query(QueryStringConstants.SIGNED_IDENTIFIER, id) + + def add_account(self, services, resource_types): + self._add_query(QueryStringConstants.SIGNED_SERVICES, services) + self._add_query(QueryStringConstants.SIGNED_RESOURCE_TYPES, resource_types) + + def add_override_response_headers( + self, + cache_control, + content_disposition, + content_encoding, + content_language, + content_type, + ): + self._add_query(QueryStringConstants.SIGNED_CACHE_CONTROL, cache_control) + self._add_query( + QueryStringConstants.SIGNED_CONTENT_DISPOSITION, content_disposition + ) + self._add_query(QueryStringConstants.SIGNED_CONTENT_ENCODING, content_encoding) + self._add_query(QueryStringConstants.SIGNED_CONTENT_LANGUAGE, content_language) + self._add_query(QueryStringConstants.SIGNED_CONTENT_TYPE, content_type) + + def add_resource_signature(self, account_name, account_key, 
service, path): + def get_value_to_append(query): + return_value = self.query_dict.get(query) or "" + return return_value + "\n" + + if path[0] != "/": + path = "/" + path + + canonicalized_resource = "/" + service + "/" + account_name + path + "\n" + + # Form the string to sign from shared_access_policy and canonicalized + # resource. The order of values is important. + string_to_sign = ( + get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + + get_value_to_append(QueryStringConstants.SIGNED_START) + + get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + + canonicalized_resource + + get_value_to_append(QueryStringConstants.SIGNED_IDENTIFIER) + + get_value_to_append(QueryStringConstants.SIGNED_IP) + + get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + + get_value_to_append(QueryStringConstants.SIGNED_VERSION) + ) + + if service in ["blob", "file"]: + string_to_sign += ( + get_value_to_append(QueryStringConstants.SIGNED_CACHE_CONTROL) + + get_value_to_append(QueryStringConstants.SIGNED_CONTENT_DISPOSITION) + + get_value_to_append(QueryStringConstants.SIGNED_CONTENT_ENCODING) + + get_value_to_append(QueryStringConstants.SIGNED_CONTENT_LANGUAGE) + + get_value_to_append(QueryStringConstants.SIGNED_CONTENT_TYPE) + ) + + # remove the trailing newline + if string_to_sign[-1] == "\n": + string_to_sign = string_to_sign[:-1] + + self._add_query( + QueryStringConstants.SIGNED_SIGNATURE, + _sign_string(account_key, string_to_sign), + ) + + def add_account_signature(self, account_name, account_key): + # type: (str, str) -> None + def get_value_to_append(query): + return_value = self.query_dict.get(query) or "" + return return_value + "\n" + + string_to_sign = ( + account_name + + "\n" + + get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + + get_value_to_append(QueryStringConstants.SIGNED_SERVICES) + + get_value_to_append(QueryStringConstants.SIGNED_RESOURCE_TYPES) + + get_value_to_append(QueryStringConstants.SIGNED_START) + + get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + + get_value_to_append(QueryStringConstants.SIGNED_IP) + + get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + + get_value_to_append(QueryStringConstants.SIGNED_VERSION) + ) + + self._add_query( + QueryStringConstants.SIGNED_SIGNATURE, + _sign_string(account_key, string_to_sign), + ) + + def get_token(self): + # type: () -> str + return "&".join( + [ + "{0}={1}".format(n, url_quote(v)) + for n, v in self.query_dict.items() + if v is not None + ] + ) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_batch.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_batch.py new file mode 100644 index 000000000000..89fe84b1117a --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_batch.py @@ -0,0 +1,686 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+# --------------------------------------------------------------------------
+from typing import (
+    TYPE_CHECKING,
+    Union,
+    Any,
+    Dict,
+    Mapping,
+    Optional,
+    List
+)
+
+from azure.core import MatchConditions
+
+from ._common_conversion import _transform_patch_to_cosmos_post
+from ._models import UpdateMode
+from ._serialize import _get_match_headers, _add_entity_properties
+from ._entity import TableEntity
+
+if TYPE_CHECKING:
+    from azure.core.pipeline.transport import HttpRequest
+    import msrest
+    from ._generated import models, AzureTable
+    from ._generated._configuration import AzureTableConfiguration
+
+EntityType = Union[TableEntity, Mapping[str, Any]]
+
+
+class TableBatchOperations(object):
+    """
+    The class used to collect batch operations for the Tables service.
+
+    The Tables service supports batch transactions on entities that are in the
+    same table and share the same partition key. Multiple operations are
+    supported within a single transaction. The batch can include at most 100
+    entities, and its total payload may be no more than 4 MB in size.
+
+    """
+
+    def __init__(
+        self,
+        client,  # type: AzureTable
+        serializer,  # type: msrest.Serializer
+        deserializer,  # type: msrest.Deserializer
+        config,  # type: AzureTableConfiguration
+        table_name,  # type: str
+        is_cosmos_endpoint=False,  # type: bool
+        **kwargs  # type: Dict[str, Any]
+    ):
+        """Create a collection of batch operations for a given table.
+
+        :param client: an AzureTable object
+        :type client: AzureTable
+        :param serializer: serializer object for request serialization
+        :type serializer: msrest.Serializer
+        :param deserializer: deserializer object for request deserialization
+        :type deserializer: msrest.Deserializer
+        :param config: Azure Table Configuration object
+        :type config: AzureTableConfiguration
+        :param table_name: name of the Table to perform operations on
+        :type table_name: str
+
+        :returns: None
+        """
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+        self._is_cosmos_endpoint = is_cosmos_endpoint
+        self.table_name = table_name
+
+        self._partition_key = kwargs.pop("partition_key", None)
+        self.requests = []  # type: List[HttpRequest]
+
+    def __len__(self):
+        return len(self.requests)
+
+    def _verify_partition_key(
+        self, entity  # type: EntityType
+    ):
+        # type: (...) -> None
+        if self._partition_key is None:
+            self._partition_key = entity["PartitionKey"]
+        elif entity["PartitionKey"] != self._partition_key:
+            raise ValueError("Partition Keys must all be the same")
+
+    def create(
+        self,
+        entity,  # type: EntityType
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Adds an insert operation to the current batch.
+
+        :param entity: The properties for the table entity.
+        :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
+        :return: None
+        :rtype: None
+        :raises ValueError:
+
+        .. admonition:: Example:
+
+            .. 
literalinclude:: ../samples/sample_batching.py + :start-after: [START batching] + :end-before: [END batching] + :language: python + :dedent: 8 + :caption: Creating and adding an entity to a Table + """ + self._verify_partition_key(entity) + temp = entity.copy() # type: ignore + + if "PartitionKey" in temp and "RowKey" in temp: + temp = _add_entity_properties(temp) + else: + raise ValueError("PartitionKey and/or RowKey were not provided in entity") + self._batch_create_entity(table=self.table_name, entity=temp, **kwargs) + + def _batch_create_entity( + self, + table, # type: str + entity, # type: EntityType + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + response_preference="return-no-content", # type: Optional[Union[str, "models.ResponseFormat"]] + query_options=None, # type: Optional["models.QueryOptions"] + **kwargs # type: Any + ): + # (...) -> None + """ + Adds an insert operation to the batch. See + :func:`azure.data.tables.TableClient.insert_entity` for more information + on insert operations. + + The operation will not be executed until the batch is committed + + :param: table: + The table to perform the operation on + :type: table: str + :param: entity: + The entity to insert. Can be a dict or an entity object + Must contain a PartitionKey and a RowKey. + :type: entity: dict or :class:`~azure.data.tables.models.Entity` + """ + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json;odata=nometadata") + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self._batch_create_entity.metadata["url"] # type: ignore + path_format_arguments = { + "url": self._serialize.url( + "self._config.url", self._config.url, "str", skip_quote=True + ), + "table": self._serialize.url("table", table, "str"), + } + url = self._client._client.format_url( # pylint: disable=protected-access + url, **path_format_arguments + ) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters["timeout"] = self._serialize.query( + "timeout", timeout, "int", minimum=0 + ) + if _format is not None: + query_parameters["$format"] = self._serialize.query( + "format", _format, "str" + ) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters["x-ms-version"] = self._serialize.header( + "self._config.version", self._config.version, "str" + ) + if request_id_parameter is not None: + header_parameters["x-ms-client-request-id"] = self._serialize.header( + "request_id_parameter", request_id_parameter, "str" + ) + header_parameters["DataServiceVersion"] = self._serialize.header( + "data_service_version", data_service_version, "str" + ) + if response_preference is not None: + header_parameters["Prefer"] = self._serialize.header( + "response_preference", response_preference, "str" + ) + header_parameters["Content-Type"] = self._serialize.header( + "content_type", content_type, "str" + ) + header_parameters["Accept"] = self._serialize.header("accept", accept, "str") + + body_content_kwargs = {} # type: Dict[str, Any] + if entity is not None: + body_content = self._serialize.body(entity, "{object}") + else: + body_content = None + body_content_kwargs["content"] = body_content + request = self._client._client.post( # pylint: disable=protected-access + url, query_parameters, header_parameters, **body_content_kwargs + ) + self.requests.append(request) + + 
_batch_create_entity.metadata = {"url": "/{table}"}  # type: ignore
+
+    def update(
+        self,
+        entity,  # type: EntityType
+        mode=UpdateMode.MERGE,  # type: Union[str, UpdateMode]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Adds an update operation to the current batch.
+
+        :param entity: The properties for the table entity.
+        :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
+        :param mode: Merge or Replace entity
+        :type mode: :class:`~azure.data.tables.UpdateMode`
+        :keyword str etag: Etag of the entity
+        :keyword match_condition: The condition under which to perform the operation.
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: None
+        :rtype: None
+        :raises ValueError:
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/sample_batching.py
+                :start-after: [START batching]
+                :end-before: [END batching]
+                :language: python
+                :dedent: 8
+                :caption: Adding an update operation to a batch
+        """
+        self._verify_partition_key(entity)
+        temp = entity.copy()  # type: ignore
+
+        match_condition = kwargs.pop("match_condition", None)
+        etag = kwargs.pop("etag", None)
+        if match_condition and not etag:
+            try:
+                etag = entity.metadata.get("etag", None)  # type: ignore
+            except (AttributeError, TypeError):
+                pass
+        if_match = _get_match_headers(
+            etag=etag,
+            match_condition=match_condition or MatchConditions.Unconditionally,
+        )
+
+        partition_key = temp["PartitionKey"]
+        row_key = temp["RowKey"]
+        temp = _add_entity_properties(temp)
+        if mode is UpdateMode.REPLACE:
+            self._batch_update_entity(
+                table=self.table_name,
+                partition_key=partition_key,
+                row_key=row_key,
+                if_match=if_match,
+                table_entity_properties=temp,
+                **kwargs
+            )
+        elif mode is UpdateMode.MERGE:
+            self._batch_merge_entity(
+                table=self.table_name,
+                partition_key=partition_key,
+                row_key=row_key,
+                if_match=if_match,
+                table_entity_properties=temp,
+                **kwargs
+            )
+        else:
+            raise ValueError("Mode type is not supported")
+
+    def _batch_update_entity(
+        self,
+        table,  # type: str
+        partition_key,  # type: str
+        row_key,  # type: str
+        timeout=None,  # type: Optional[int]
+        request_id_parameter=None,  # type: Optional[str]
+        if_match=None,  # type: Optional[str]
+        table_entity_properties=None,  # type: Optional[EntityType]
+        query_options=None,  # type: Optional["models.QueryOptions"]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Update entity in a table.
+
+        :param table: The name of the table.
+        :type table: str
+        :param partition_key: The partition key of the entity.
+        :type partition_key: str
+        :param row_key: The row key of the entity.
+        :type row_key: str
+        :param timeout: The timeout parameter is expressed in seconds.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when analytics logging is enabled.
+        :type request_id_parameter: str
+        :param if_match: Match condition for an entity to be updated. If specified and a matching
+         entity is not found, an error will be raised. To force an unconditional update, set to the
+         wildcard character (*). If not specified, an insert will be performed when no existing entity
+         is found to update and a replace will be performed if an existing entity is found.
+        :type if_match: str
+        :param table_entity_properties: The properties for the table entity.
+        :type table_entity_properties: dict[str, object]
+        :param query_options: Parameter group.
+ :type query_options: ~azure.data.tables.models.QueryOptions + :return: None + :rtype: None + """ + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._batch_update_entity.metadata["url"] # type: ignore + path_format_arguments = { + "url": self._serialize.url( + "self._config.url", self._config.url, "str", skip_quote=True + ), + "table": self._serialize.url("table", table, "str"), + "partitionKey": self._serialize.url("partition_key", partition_key, "str"), + "rowKey": self._serialize.url("row_key", row_key, "str"), + } + url = self._client._client.format_url( # pylint: disable=protected-access + url, **path_format_arguments + ) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters["timeout"] = self._serialize.query( + "timeout", timeout, "int", minimum=0 + ) + if _format is not None: + query_parameters["$format"] = self._serialize.query( + "format", _format, "str" + ) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters["x-ms-version"] = self._serialize.header( + "self._config.version", self._config.version, "str" + ) + if request_id_parameter is not None: + header_parameters["x-ms-client-request-id"] = self._serialize.header( + "request_id_parameter", request_id_parameter, "str" + ) + header_parameters["DataServiceVersion"] = self._serialize.header( + "data_service_version", data_service_version, "str" + ) + if if_match is not None: + header_parameters["If-Match"] = self._serialize.header( + "if_match", if_match, "str" + ) + header_parameters["Content-Type"] = self._serialize.header( + "content_type", content_type, "str" + ) + header_parameters["Accept"] = self._serialize.header("accept", accept, "str") + + body_content_kwargs = {} # type: Dict[str, Any] + if table_entity_properties is not None: + body_content = self._serialize.body(table_entity_properties, "{object}") + else: + body_content = None + body_content_kwargs["content"] = body_content + request = self._client._client.put( # pylint: disable=protected-access + url, query_parameters, header_parameters, **body_content_kwargs + ) + self.requests.append(request) + + _batch_update_entity.metadata = { # type: ignore + "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')" + } # type: ignore + + def _batch_merge_entity( + self, + table, # type: str + partition_key, # type: str + row_key, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + if_match=None, # type: Optional[str] + table_entity_properties=None, # type: Optional[EntityType] + query_options=None, # type: Optional["models.QueryOptions"] + **kwargs # type: Any + ): + # type: (...) -> None + """Merge entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param if_match: Match condition for an entity to be updated. 
If specified and a matching
+         entity is not found, an error will be raised. To force an unconditional update, set to the
+         wildcard character (*). If not specified, an insert will be performed when no existing entity
+         is found to update and a merge will be performed if an existing entity is found.
+        :type if_match: str
+        :param table_entity_properties: The properties for the table entity.
+        :type table_entity_properties: dict[str, object]
+        :param query_options: Parameter group.
+        :type query_options: ~azure.data.tables.models.QueryOptions
+        :return: None
+        :rtype: None
+        """
+
+        _format = None
+        if query_options is not None:
+            _format = query_options.format
+        data_service_version = "3.0"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._batch_merge_entity.metadata["url"]  # type: ignore
+        path_format_arguments = {
+            "url": self._serialize.url(
+                "self._config.url", self._config.url, "str", skip_quote=True
+            ),
+            "table": self._serialize.url("table", table, "str"),
+            "partitionKey": self._serialize.url("partition_key", partition_key, "str"),
+            "rowKey": self._serialize.url("row_key", row_key, "str"),
+        }
+        url = self._client._client.format_url(  # pylint: disable=protected-access
+            url, **path_format_arguments
+        )
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        if timeout is not None:
+            query_parameters["timeout"] = self._serialize.query(
+                "timeout", timeout, "int", minimum=0
+            )
+        if _format is not None:
+            query_parameters["$format"] = self._serialize.query(
+                "format", _format, "str"
+            )
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters["x-ms-version"] = self._serialize.header(
+            "self._config.version", self._config.version, "str"
+        )
+        if request_id_parameter is not None:
+            header_parameters["x-ms-client-request-id"] = self._serialize.header(
+                "request_id_parameter", request_id_parameter, "str"
+            )
+        header_parameters["DataServiceVersion"] = self._serialize.header(
+            "data_service_version", data_service_version, "str"
+        )
+        if if_match is not None:
+            header_parameters["If-Match"] = self._serialize.header(
+                "if_match", if_match, "str"
+            )
+        header_parameters["Content-Type"] = self._serialize.header(
+            "content_type", content_type, "str"
+        )
+        header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if table_entity_properties is not None:
+            body_content = self._serialize.body(table_entity_properties, "{object}")
+        else:
+            body_content = None
+        body_content_kwargs["content"] = body_content
+        request = self._client._client.patch(  # pylint: disable=protected-access
+            url, query_parameters, header_parameters, **body_content_kwargs
+        )
+        if self._is_cosmos_endpoint:
+            _transform_patch_to_cosmos_post(request)
+        self.requests.append(request)
+
+    _batch_merge_entity.metadata = {  # type: ignore
+        "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')"
+    }
+
+    def delete(
+        self,
+        entity,  # type: EntityType
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Adds a delete operation to the current batch.
+
+        :param entity: The entity to delete. Must contain a PartitionKey and a RowKey.
+        :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
+        :keyword str etag: Etag of the entity
+        :keyword match_condition: The condition under which to perform the operation.
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :raises ValueError:
+
+        .. admonition:: Example:
+
+            .. 
literalinclude:: ../samples/sample_batching.py + :start-after: [START batching] + :end-before: [END batching] + :language: python + :dedent: 8 + :caption: Creating and adding an entity to a Table + """ + self._verify_partition_key(entity) + temp = entity.copy() # type: ignore + partition_key = temp["PartitionKey"] + row_key = temp["RowKey"] + + match_condition = kwargs.pop("match_condition", None) + etag = kwargs.pop("etag", None) + if match_condition and not etag: + try: + etag = entity.metadata.get("etag", None) # type: ignore + except (AttributeError, TypeError): + pass + if_match = _get_match_headers( + etag=etag, + match_condition=match_condition or MatchConditions.Unconditionally, + ) + + self._batch_delete_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + if_match=if_match, + **kwargs + ) + + def _batch_delete_entity( + self, + table, # type: str + partition_key, # type: str + row_key, # type: str + if_match, # type: str + timeout=None, # type: Optional[int] + request_id_parameter=None, # type: Optional[str] + query_options=None, # type: Optional["models.QueryOptions"] + ): + # type: (...) -> None + """Deletes the specified entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param if_match: Match condition for an entity to be deleted. If specified and a matching + entity is not found, an error will be raised. To force an unconditional delete, set to the + wildcard character (*). + :type if_match: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param query_options: Parameter group. 
+ :type query_options: ~azure.data.tables.models.QueryOptions + :return: None + :rtype: None + """ + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self._batch_delete_entity.metadata["url"] # type: ignore + path_format_arguments = { + "url": self._serialize.url( + "self._config.url", self._config.url, "str", skip_quote=True + ), + "table": self._serialize.url("table", table, "str"), + "partitionKey": self._serialize.url("partition_key", partition_key, "str"), + "rowKey": self._serialize.url("row_key", row_key, "str"), + } + url = self._client._client.format_url( # pylint: disable=protected-access + url, **path_format_arguments + ) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters["timeout"] = self._serialize.query( + "timeout", timeout, "int", minimum=0 + ) + if _format is not None: + query_parameters["$format"] = self._serialize.query( + "format", _format, "str" + ) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters["x-ms-version"] = self._serialize.header( + "self._config.version", self._config.version, "str" + ) + if request_id_parameter is not None: + header_parameters["x-ms-client-request-id"] = self._serialize.header( + "request_id_parameter", request_id_parameter, "str" + ) + header_parameters["DataServiceVersion"] = self._serialize.header( + "data_service_version", data_service_version, "str" + ) + header_parameters["If-Match"] = self._serialize.header( + "if_match", if_match, "str" + ) + header_parameters["Accept"] = self._serialize.header("accept", accept, "str") + + request = self._client._client.delete( # pylint: disable=protected-access + url, query_parameters, header_parameters + ) + self.requests.append(request) + + _batch_delete_entity.metadata = { # type: ignore + "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')" + } + + def upsert( + self, + entity, # type: EntityType + mode=UpdateMode.MERGE, # type: Union[str, UpdateMode] + **kwargs # type: Any + ): + # type: (...) -> None + """Adds an upsert (update/merge) operation to the batch. + + :param entity: The properties for the table entity. + :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str] + :param mode: Merge or Replace entity + :type mode: :class:`~azure.data.tables.UpdateMode` + :raises ValueError: + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/sample_batching.py + :start-after: [START batching] + :end-before: [END batching] + :language: python + :dedent: 8 + :caption: Creating and adding an entity to a Table + """ + self._verify_partition_key(entity) + temp = entity.copy() # type: ignore + + partition_key = temp["PartitionKey"] + row_key = temp["RowKey"] + temp = _add_entity_properties(temp) + + if mode is UpdateMode.MERGE: + self._batch_merge_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + table_entity_properties=temp, + **kwargs + ) + elif mode is UpdateMode.REPLACE: + self._batch_update_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + table_entity_properties=temp, + **kwargs + ) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_client.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_client.py new file mode 100644 index 000000000000..14e9e437f461 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_client.py @@ -0,0 +1,731 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import functools +from typing import Optional, Any, TYPE_CHECKING, Union, List, Tuple, Dict, Mapping, Iterable, overload, cast +try: + from urllib.parse import urlparse, unquote +except ImportError: + from urlparse import urlparse # type: ignore + from urllib2 import unquote # type: ignore + +from azure.core import MatchConditions +from azure.core.exceptions import HttpResponseError +from azure.core.paging import ItemPaged +from azure.core.tracing.decorator import distributed_trace + +from ._deserialize import _convert_to_entity, _trim_service_metadata +from ._entity import TableEntity +from ._error import ( + _process_table_error, + _validate_table_name, + _reraise_error, + _decode_error +) +from ._generated.models import ( + SignedIdentifier, + TableProperties, + QueryOptions +) +from ._serialize import _get_match_headers, _add_entity_properties +from ._base_client import parse_connection_str, TablesBaseClient +from ._serialize import serialize_iso, _parameter_filter_substitution +from ._deserialize import deserialize_iso, _return_headers_and_deserialized +from ._table_batch import TableBatchOperations +from ._models import ( + TableEntityPropertiesPaged, + UpdateMode, + TableAccessPolicy, + TransactionOperation, + TableItem +) + +EntityType = Union[TableEntity, Mapping[str, Any]] +OperationType = Union[TransactionOperation, str] +TransactionOperationType = Union[Tuple[OperationType, EntityType], Tuple[OperationType, EntityType, Mapping[str, Any]]] + +if TYPE_CHECKING: + from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential + + +class TableClient(TablesBaseClient): + """A client to interact with a specific Table in an Azure Tables account. + + :ivar str account_name: The name of the Tables account. + :ivar str table_name: The name of the table. + :ivar str url: The full URL to the Tables account. 
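+
+    A brief usage sketch (illustrative only; ``conn_str`` stands in for a real
+    storage connection string)::
+
+        client = TableClient.from_connection_string(conn_str, table_name="mytable")
+        entity = client.get_entity(partition_key="pk", row_key="rk")
+        client.submit_transaction([
+            ("upsert", {"PartitionKey": "pk", "RowKey": "rk2", "value": 1}),
+        ])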
+ """ + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, + endpoint, # type: str + table_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Create TableClient from a Credential. + + :param str endpoint: A URL to an Azure Tables account. + :param str table_name: The table name. + :keyword credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core), + AzureSasCredential (azure-core), or TokenCredentials from azure-identity. + :paramtype credential: + :class:`~azure.core.credentials.AzureNamedKeyCredential` or + :class:`~azure.core.credentials.AzureSasCredential` or + :class:`~azure.core.credentials.TokenCredential` + :returns: None + """ + if not table_name: + raise ValueError("Please specify a table name.") + _validate_table_name(table_name) + self.table_name = table_name + super(TableClient, self).__init__(endpoint, **kwargs) + + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. + """ + return "{}://{}{}".format(self.scheme, hostname, self._query_str) + + @classmethod + def from_connection_string( + cls, + conn_str, # type: str + table_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> TableClient + """Create TableClient from a Connection String. + + :param str conn_str: A connection string to an Azure Tables account. + :param str table_name: The table name. + :returns: A table client. + :rtype: :class:`~azure.data.tables.TableClient` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_create_client.py + :start-after: [START create_table_client] + :end-before: [END create_table_client] + :language: python + :dedent: 8 + :caption: Authenticating a TableServiceClient from a connection_string + """ + endpoint, credential = parse_connection_str( + conn_str=conn_str, credential=None, keyword_args=kwargs + ) + return cls(endpoint, table_name=table_name, credential=credential, **kwargs) + + @classmethod + def from_table_url(cls, table_url, **kwargs): + # type: (str, Any) -> TableClient + """A client to interact with a specific Table. + + :param str table_url: The full URI to the table, including SAS token if used. + :keyword credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be one of AzureNamedKeyCredential + or AzureSasCredential from azure-core. + :paramtype credential: + :class:`~azure.core.credentials.AzureNamedKeyCredential` or + :class:`~azure.core.credentials.AzureSasCredential` + :returns: A table client. + :rtype: :class:`~azure.data.tables.TableClient` + """ + try: + if not table_url.lower().startswith("http"): + table_url = "https://" + table_url + except AttributeError: + raise ValueError("Table URL must be a string.") + parsed_url = urlparse(table_url.rstrip("/")) + + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(table_url)) + + table_path = parsed_url.path.lstrip("/").split("/") + account_path = "" + if len(table_path) > 1: + account_path = "/" + "/".join(table_path[:-1]) + endpoint = "{}://{}{}?{}".format( + parsed_url.scheme, + parsed_url.netloc.rstrip("/"), + account_path, + parsed_url.query, + ) + table_name = unquote(table_path[-1]) + if table_name.lower().startswith("tables('"): + table_name = table_name[8:-2] + if not table_name: + raise ValueError( + "Invalid URL. 
Please provide a URL with a valid table name" + ) + return cls(endpoint, table_name=table_name, **kwargs) + + @distributed_trace + def get_table_access_policy( + self, **kwargs # type: Any + ): + # type: (...) -> Dict[str, Optional[TableAccessPolicy]] + """Retrieves details about any stored access policies specified on the table that may be + used with Shared Access Signatures. + + :return: Dictionary of SignedIdentifiers + :rtype: Dict[str, Optional[:class:`~azure.data.tables.TableAccessPolicy`]] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + """ + timeout = kwargs.pop("timeout", None) + try: + _, identifiers = self._client.table.get_access_policy( + table=self.table_name, + timeout=timeout, + cls=kwargs.pop("cls", None) or _return_headers_and_deserialized, + **kwargs + ) + except HttpResponseError as error: + _process_table_error(error) + output = {} # type: Dict[str, Optional[TableAccessPolicy]] + for identifier in cast(List[SignedIdentifier], identifiers): + if identifier.access_policy: + output[identifier.id] = TableAccessPolicy( + start=deserialize_iso(identifier.access_policy.start), + expiry=deserialize_iso(identifier.access_policy.expiry), + permission=identifier.access_policy.permission + ) + else: + output[identifier.id] = None + return output + + @distributed_trace + def set_table_access_policy( + self, + signed_identifiers, # type: Dict[str, Optional[TableAccessPolicy]] + **kwargs + ): + # type: (...) -> None + """Sets stored access policies for the table that may be used with Shared Access Signatures. + + :param signed_identifiers: Access policies to set for the table + :type signed_identifiers: Dict[str, Optional[:class:`~azure.data.tables.TableAccessPolicy`]] + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + """ + identifiers = [] + for key, value in signed_identifiers.items(): + payload = None + if value: + payload = TableAccessPolicy( + start=serialize_iso(value.start), + expiry=serialize_iso(value.expiry), + permission=value.permission + ) + identifiers.append(SignedIdentifier(id=key, access_policy=payload)) + signed_identifiers = identifiers # type: ignore + try: + self._client.table.set_access_policy( + table=self.table_name, table_acl=signed_identifiers or None, **kwargs # type: ignore + ) + except HttpResponseError as error: + try: + _process_table_error(error) + except HttpResponseError as table_error: + if (table_error.error_code == 'InvalidXmlDocument' # type: ignore + and len(signed_identifiers) > 5): + raise ValueError( + 'Too many access policies provided. The server does not support setting ' + 'more than 5 access policies on a single resource.' + ) + raise + + @distributed_trace + def create_table( + self, **kwargs # type: Any + ): + # type: (...) -> TableItem + """Creates a new table under the current account. + + :return: A TableItem representing the created table. + :rtype: :class:`~azure.data.tables.TableItem` + :raises: :class:`~azure.core.exceptions.ResourceExistsError` If the entity already exists + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/sample_create_delete_table.py + :start-after: [START create_table_from_table_client] + :end-before: [END create_table_from_table_client] + :language: python + :dedent: 8 + :caption: Creating a table from the TableClient object + """ + table_properties = TableProperties(table_name=self.table_name) + try: + result = self._client.table.create(table_properties, **kwargs) + except HttpResponseError as error: + _process_table_error(error) + return TableItem(name=result.table_name) # type: ignore + + @distributed_trace + def delete_table( + self, **kwargs # type: Any + ): + # type: (...) -> None + """Deletes the table under the current account. No error will be raised + if the table does not exist + + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_create_delete_table.py + :start-after: [START delete_table_from_table_client] + :end-before: [END delete_table_from_table_client] + :language: python + :dedent: 8 + :caption: Deleting a table from the TableClient object + """ + try: + self._client.table.delete(table=self.table_name, **kwargs) + except HttpResponseError as error: + if error.status_code == 404: + return + _process_table_error(error) + + @overload + def delete_entity(self, partition_key, row_key, **kwargs): + # type: (str, str, Any) -> None + pass + + @overload + def delete_entity(self, entity, **kwargs): + # type: (Union[TableEntity, Mapping[str, Any]], Any) -> None + pass + + @distributed_trace + def delete_entity(self, *args, **kwargs): + # type: (Union[TableEntity, str], Any) -> None + """Deletes the specified entity in a table. No error will be raised if + the entity or PartitionKey-RowKey pairing is not found. + + :param str partition_key: The partition key of the entity. + :param str row_key: The row key of the entity. + :param entity: The entity to delete + :type entity: Union[TableEntity, Mapping[str, str]] + :keyword str etag: Etag of the entity + :keyword match_condition: The condition under which to perform the operation. + Supported values include: MatchConditions.IfNotModified, MatchConditions.Unconditionally. + The default value is Unconditionally. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/sample_insert_delete_entities.py + :start-after: [START delete_entity] + :end-before: [END delete_entity] + :language: python + :dedent: 12 + :caption: Deleting an entity of a Table + """ + try: + entity = kwargs.pop('entity', None) + if not entity: + entity = args[0] + partition_key = entity['PartitionKey'] + row_key = entity['RowKey'] + except (TypeError, IndexError): + partition_key = kwargs.pop('partition_key', None) + if not partition_key: + partition_key = args[0] + row_key = kwargs.pop("row_key", None) + if not row_key: + row_key = args[1] + + match_condition = kwargs.pop("match_condition", None) + etag = kwargs.pop("etag", None) + if match_condition and entity and not etag: + try: + etag = entity.metadata.get("etag", None) + except (AttributeError, TypeError): + pass + if_match = _get_match_headers( + etag=etag, + match_condition=match_condition or MatchConditions.Unconditionally, + ) + + try: + self._client.table.delete_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + if_match=if_match, + **kwargs + ) + except HttpResponseError as error: + if error.status_code == 404: + return + _process_table_error(error) + + @distributed_trace + def create_entity( + self, + entity, # type: EntityType + **kwargs # type: Any + ): + # type: (...) -> Dict[str,str] + """Insert entity in a table. + + :param entity: The properties for the table entity. + :type entity: Union[TableEntity, Mapping[str, Any]] + :return: Dictionary mapping operation metadata returned from the service + :rtype: Dict[str,str] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_insert_delete_entities.py + :start-after: [START create_entity] + :end-before: [END create_entity] + :language: python + :dedent: 12 + :caption: Creating and adding an entity to a Table + """ + entity = _add_entity_properties(entity) + try: + metadata, content = self._client.table.insert_entity( # type: ignore + table=self.table_name, + table_entity_properties=entity, # type: ignore + cls=kwargs.pop("cls", _return_headers_and_deserialized), + **kwargs + ) + except HttpResponseError as error: + decoded = _decode_error(error.response, error.message) + if decoded.error_code == "PropertiesNeedValue": + if entity.get("PartitionKey") is None: + raise ValueError("PartitionKey must be present in an entity") + if entity.get("RowKey") is None: + raise ValueError("RowKey must be present in an entity") + _reraise_error(error) + return _trim_service_metadata(metadata, content=content) # type: ignore + + @distributed_trace + def update_entity( + self, + entity, # type: EntityType + mode=UpdateMode.MERGE, # type: UpdateMode + **kwargs # type: Any + ): + # type: (...) -> Dict[str,str] + """Update entity in a table. + + :param entity: The properties for the table entity. + :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str] + :param mode: Merge or Replace entity + :type mode: :class:`~azure.data.tables.UpdateMode` + :keyword str etag: Etag of the entity + :keyword match_condition: The condition under which to perform the operation. + Supported values include: MatchConditions.IfNotModified, MatchConditions.Unconditionally. + The default value is Unconditionally. + :paramtype match_condition: ~azure.core.MatchConditions + :return: Dictionary mapping operation metadata returned from the service + :rtype: Dict[str,str] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/sample_update_upsert_merge_entities.py
+                :start-after: [START update_entity]
+                :end-before: [END update_entity]
+                :language: python
+                :dedent: 16
+                :caption: Updating an already existing entity in a Table
+        """
+        match_condition = kwargs.pop("match_condition", None)
+        etag = kwargs.pop("etag", None)
+        if match_condition and not etag:
+            try:
+                etag = entity.metadata.get("etag", None)  # type: ignore
+            except (AttributeError, TypeError):
+                pass
+        if_match = _get_match_headers(
+            etag=etag,
+            match_condition=match_condition or MatchConditions.Unconditionally,
+        )
+
+        partition_key = entity["PartitionKey"]
+        row_key = entity["RowKey"]
+        entity = _add_entity_properties(entity)
+        try:
+            metadata = None
+            content = None
+            if mode is UpdateMode.REPLACE:
+                metadata, content = self._client.table.update_entity(  # type: ignore
+                    table=self.table_name,
+                    partition_key=partition_key,
+                    row_key=row_key,
+                    table_entity_properties=entity,  # type: ignore
+                    if_match=if_match,
+                    cls=kwargs.pop("cls", _return_headers_and_deserialized),
+                    **kwargs
+                )
+            elif mode is UpdateMode.MERGE:
+                metadata, content = self._client.table.merge_entity(  # type: ignore
+                    table=self.table_name,
+                    partition_key=partition_key,
+                    row_key=row_key,
+                    if_match=if_match,
+                    table_entity_properties=entity,  # type: ignore
+                    cls=kwargs.pop("cls", _return_headers_and_deserialized),
+                    **kwargs
+                )
+            else:
+                raise ValueError("Mode type is not supported")
+        except HttpResponseError as error:
+            _process_table_error(error)
+        return _trim_service_metadata(metadata, content=content)  # type: ignore
+
+    @distributed_trace
+    def list_entities(
+        self, **kwargs  # type: Any
+    ):
+        # type: (...) -> ItemPaged[TableEntity]
+        """Lists entities in a table.
+
+        :keyword int results_per_page: Number of entities returned per service request.
+        :keyword select: Specify desired properties of an entity to return.
+        :paramtype select: str or List[str]
+        :return: ItemPaged[:class:`~azure.data.tables.TableEntity`]
+        :rtype: ~azure.core.paging.ItemPaged
+        :raises: :class:`~azure.core.exceptions.HttpResponseError`
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/sample_update_upsert_merge_entities.py
+                :start-after: [START query_entities]
+                :end-before: [END query_entities]
+                :language: python
+                :dedent: 16
+                :caption: List all entities held within a table
+        """
+        user_select = kwargs.pop("select", None)
+        if user_select and not isinstance(user_select, str):
+            user_select = ",".join(user_select)
+        top = kwargs.pop("results_per_page", None)
+
+        command = functools.partial(self._client.table.query_entities, **kwargs)
+        return ItemPaged(
+            command,
+            table=self.table_name,
+            results_per_page=top,
+            select=user_select,
+            page_iterator_class=TableEntityPropertiesPaged,
+        )
+
+    @distributed_trace
+    def query_entities(
+        self,
+        query_filter,
+        **kwargs
+    ):
+        # type: (str, Dict[str, Any]) -> ItemPaged[TableEntity]
+        """Queries entities in a table.
+
+        :param str query_filter: Specify a filter to return certain entities
+        :keyword int results_per_page: Number of entities returned per service request.
+        :keyword select: Specify desired properties of an entity to return.
+        :paramtype select: str or List[str]
+        :keyword parameters: Dictionary for formatting query with additional, user defined parameters
+        :paramtype parameters: Dict[str, Any]
+        :return: ItemPaged[:class:`~azure.data.tables.TableEntity`]
+        :rtype: ~azure.core.paging.ItemPaged
+        :raises: :class:`~azure.core.exceptions.HttpResponseError`
+
+        .. admonition:: Example:
+
+            .. 
literalinclude:: ../samples/sample_query_table.py + :start-after: [START query_entities] + :end-before: [END query_entities] + :language: python + :dedent: 8 + :caption: Query entities held within a table + """ + parameters = kwargs.pop("parameters", None) + query_filter = _parameter_filter_substitution( + parameters, query_filter # type: ignore + ) + top = kwargs.pop("results_per_page", None) + user_select = kwargs.pop("select", None) + if user_select and not isinstance(user_select, str): + user_select = ",".join(user_select) # type: ignore + + command = functools.partial(self._client.table.query_entities, **kwargs) + return ItemPaged( + command, + table=self.table_name, + results_per_page=top, + filter=query_filter, + select=user_select, + page_iterator_class=TableEntityPropertiesPaged, + ) + + @distributed_trace + def get_entity( + self, + partition_key, # type: str + row_key, # type: str + **kwargs # type: Any + ): + # type: (...) -> TableEntity + """Get a single entity in a table. + + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :keyword select: Specify desired properties of an entity to return. + :paramtype select: str or List[str] + :return: Dictionary mapping operation metadata returned from the service + :rtype: :class:`~azure.data.tables.TableEntity` + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_update_upsert_merge_entities.py + :start-after: [START get_entity] + :end-before: [END get_entity] + :language: python + :dedent: 16 + :caption: Get a single entity from a table + """ + user_select = kwargs.pop("select", None) + if user_select and not isinstance(user_select, str): + user_select = ",".join(user_select) + try: + entity = self._client.table.query_entity_with_partition_and_row_key( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + query_options=QueryOptions(select=user_select), + **kwargs + ) + except HttpResponseError as error: + _process_table_error(error) + return _convert_to_entity(entity) + + @distributed_trace + def upsert_entity( + self, + entity, # type: EntityType + mode=UpdateMode.MERGE, # type: UpdateMode + **kwargs # type: Any + ): + # type: (...) -> Dict[str,str] + """Update/Merge or Insert entity into table. + + :param entity: The properties for the table entity. + :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str] + :param mode: Merge or Replace entity + :type mode: :class:`~azure.data.tables.UpdateMode` + :return: Dictionary mapping operation metadata returned from the service + :rtype: Dict[str,str] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/sample_update_upsert_merge_entities.py + :start-after: [START upsert_entity] + :end-before: [END upsert_entity] + :language: python + :dedent: 16 + :caption: Update/merge or insert an entity into a table + """ + + partition_key = entity["PartitionKey"] + row_key = entity["RowKey"] + entity = _add_entity_properties(entity) + try: + metadata = None + content = None + if mode is UpdateMode.MERGE: + metadata, content = self._client.table.merge_entity( # type: ignore + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + table_entity_properties=entity, # type: ignore + cls=kwargs.pop("cls", _return_headers_and_deserialized), + **kwargs + ) + elif mode is UpdateMode.REPLACE: + metadata, content = self._client.table.update_entity( # type: ignore + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + table_entity_properties=entity, # type: ignore + cls=kwargs.pop("cls", _return_headers_and_deserialized), + **kwargs + ) + else: + raise ValueError( + """Update mode {} is not supported. + For a list of supported modes see the UpdateMode enum""".format( + mode + ) + ) + except HttpResponseError as error: + _process_table_error(error) + return _trim_service_metadata(metadata, content=content) # type: ignore + + def submit_transaction( + self, + operations, # type: Iterable[TransactionOperationType] + **kwargs # type: Any + ): + # type: (...) -> List[Mapping[str, Any]] + """Commit a list of operations as a single transaction. + + If any one of these operations fails, the entire transaction will be rejected. + + :param operations: The list of operations to commit in a transaction. This should be a list of + tuples containing an operation name, the entity on which to operate, and optionally, a dict of additional + kwargs for that operation. + :type operations: Iterable[Tuple[str, EntityType]] + :return: A list of mappings with response metadata for each operation in the transaction. + :rtype: List[Mapping[str, Any]] + :raises: :class:`~azure.data.tables.TableTransactionError` + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/sample_batching.py + :start-after: [START batching] + :end-before: [END batching] + :language: python + :dedent: 8 + :caption: Using transactions to send multiple requests at once + """ + batched_requests = TableBatchOperations( + self._client, + self._client._serialize, # pylint: disable=protected-access + self._client._deserialize, # pylint: disable=protected-access + self._client._config, # pylint: disable=protected-access + self.table_name, + is_cosmos_endpoint=self._cosmos_endpoint, + **kwargs + ) + for operation in operations: + try: + operation_kwargs = operation[2] # type: ignore + except IndexError: + operation_kwargs = {} + try: + getattr(batched_requests, operation[0].lower())(operation[1], **operation_kwargs) + except AttributeError: + raise ValueError("Unrecognized operation: {}".format(operation[0])) + return self._batch_send(*batched_requests.requests, **kwargs) # type: ignore diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_service_client.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_service_client.py new file mode 100644 index 000000000000..b970f02e215d --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_service_client.py @@ -0,0 +1,337 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +import functools +from typing import Any, Dict, TYPE_CHECKING +from azure.core.exceptions import HttpResponseError, ResourceExistsError +from azure.core.paging import ItemPaged +from azure.core.tracing.decorator import distributed_trace +from azure.core.pipeline import Pipeline + +from ._generated.models import TableServiceProperties +from ._models import ( + TablePropertiesPaged, + service_stats_deserialize, + service_properties_deserialize, + TableItem +) +from ._base_client import parse_connection_str, TablesBaseClient, TransportWrapper +from ._models import LocationMode +from ._error import _process_table_error +from ._table_client import TableClient +from ._serialize import _parameter_filter_substitution + +if TYPE_CHECKING: + from ._models import TableCorsRule, TableMetrics, TableAnalyticsLogging + + +class TableServiceClient(TablesBaseClient): + """A client to interact with the Table Service at the account level. + + This client provides operations to retrieve and configure the account properties + as well as list, create and delete tables within the account. + For operations relating to a specific table, a client for this entity + can be retrieved using the :func:`~get_table_client` function. + + :ivar str account_name: The name of the Tables account. + :ivar str url: The full URL to the Tables account. + :param str endpoint: + The URL to the table service endpoint. Any other entities included + in the URL path (e.g. table) will be discarded. This URL can be optionally + authenticated with a SAS token. + :keyword credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. 
The value can be one of AzureNamedKeyCredential (azure-core), + AzureSasCredential (azure-core), or TokenCredentials from azure-identity. + :paramtype credential: + :class:`~azure.core.credentials.AzureNamedKeyCredential` or + :class:`~azure.core.credentials.AzureSasCredential` or + :class:`~azure.core.credentials.TokenCredential` + :keyword str api_version: + The Storage API version to use for requests. Default value is '2019-02-02'. + Setting to an older version may result in reduced feature compatibility. + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_authentication.py + :start-after: [START auth_from_sas] + :end-before: [END auth_from_sas] + :language: python + :dedent: 8 + :caption: Authenticating a TableServiceClient from a Shared Access Signature + + .. literalinclude:: ../samples/sample_authentication.py + :start-after: [START auth_from_shared_key] + :end-before: [END auth_from_shared_key] + :language: python + :dedent: 8 + :caption: Authenticating a TableServiceClient from a Shared Account Key + """ + + def _format_url(self, hostname): + # type: (str) -> str + """Format the endpoint URL according to the current location + mode hostname. + """ + return "{}://{}{}".format(self.scheme, hostname, self._query_str) + + @classmethod + def from_connection_string(cls, conn_str, **kwargs): + # type: (str, Any) -> TableServiceClient + """Create TableServiceClient from a connection string. + + :param str conn_str: A connection string to an Azure Storage or Cosmos account. + :returns: A Table service client. + :rtype: :class:`~azure.data.tables.TableServiceClient` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_authentication.py + :start-after: [START auth_from_connection_string] + :end-before: [END auth_from_connection_string] + :language: python + :dedent: 8 + :caption: Authenticating a TableServiceClient from a connection string + """ + endpoint, credential = parse_connection_str( + conn_str=conn_str, credential=None, keyword_args=kwargs + ) + return cls(endpoint, credential=credential, **kwargs) + + @distributed_trace + def get_service_stats(self, **kwargs): + # type: (Any) -> Dict[str, object] + """Retrieves statistics related to replication for the Table service. It is only available on the secondary + location endpoint when read-access geo-redundant replication is enabled for the account. + + :return: Dictionary of service stats + :rtype: Dict[str, object] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + """ + try: + timeout = kwargs.pop("timeout", None) + stats = self._client.service.get_statistics( # type: ignore + timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs + ) + except HttpResponseError as error: + _process_table_error(error) + return service_stats_deserialize(stats) + + @distributed_trace + def get_service_properties(self, **kwargs): + # type: (Any) -> Dict[str, object] + """Gets the properties of an account's Table service, + including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. 
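+ + A minimal usage sketch (hedged; ``service_client`` is assumed to be a ``TableServiceClient`` built as in the samples above, and the dictionary keys mirror the keyword arguments of ``set_service_properties``): + + .. code-block:: python + + props = service_client.get_service_properties() + cors_rules = props["cors"] + hour_metrics = props["hour_metrics"] 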
+ + :return: Dictionary of service properties + :rtype: Dict[str, object] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + """ + timeout = kwargs.pop("timeout", None) + try: + service_props = self._client.service.get_properties(timeout=timeout, **kwargs) # type: ignore + except HttpResponseError as error: + _process_table_error(error) + return service_properties_deserialize(service_props) + + @distributed_trace + def set_service_properties(self, **kwargs): + # type: (Any) -> None + """Sets properties for an account's Table service endpoint, + including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. + + :keyword analytics_logging: Properties for analytics + :paramtype analytics_logging: ~azure.data.tables.TableAnalyticsLogging + :keyword hour_metrics: Hour level metrics + :paramtype hour_metrics: ~azure.data.tables.TableMetrics + :keyword minute_metrics: Minute level metrics + :paramtype minute_metrics: ~azure.data.tables.TableMetrics + :keyword cors: Cross-origin resource sharing rules + :paramtype cors: List[~azure.data.tables.TableCorsRule] + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + """ + cors = kwargs.pop('cors', None) + if cors: + cors = [c._to_generated() for c in cors] # pylint:disable=protected-access + props = TableServiceProperties( + logging=kwargs.pop('analytics_logging', None), + hour_metrics=kwargs.pop('hour_metrics', None), + minute_metrics=kwargs.pop('minute_metrics', None), + cors=cors, # type: ignore + ) + try: + self._client.service.set_properties(props, **kwargs) + except HttpResponseError as error: + _process_table_error(error) + + @distributed_trace + def create_table(self, table_name, **kwargs): + # type: (str, Any) -> TableClient + """Creates a new table under the current account. + + :param table_name: The Table name. + :type table_name: str + :return: TableClient + :rtype: :class:`~azure.data.tables.TableClient` + :raises: :class:`~azure.core.exceptions.ResourceExistsError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_create_delete_table.py + :start-after: [START create_table_from_tc] + :end-before: [END create_table_from_tc] + :language: python + :dedent: 8 + :caption: Creating a table from the TableServiceClient object + """ + table = self.get_table_client(table_name=table_name) + table.create_table(**kwargs) + return table + + @distributed_trace + def create_table_if_not_exists(self, table_name, **kwargs): + # type: (str, Any) -> TableClient + """Creates a new table if it does not currently exist. + If the table already exists, a client for the existing table is + returned. + + :param table_name: The Table name. + :type table_name: str + :return: TableClient + :rtype: :class:`~azure.data.tables.TableClient` + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_create_delete_table.py + :start-after: [START create_table_if_not_exists] + :end-before: [END create_table_if_not_exists] + :language: python + :dedent: 8 + :caption: Creating a table if it does not already exist, from the TableServiceClient object + """ + table = self.get_table_client(table_name=table_name) + try: + table.create_table(**kwargs) + except ResourceExistsError: + pass + return table + + @distributed_trace + def delete_table(self, table_name, **kwargs): + # type: (str, Any) -> None + """Deletes the table under the current account. No error will be raised + if the given table is not found. + + :param table_name: The Table name. 
+ :type table_name: str + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_create_delete_table.py + :start-after: [START delete_table_from_tc] + :end-before: [END delete_table_from_tc] + :language: python + :dedent: 8 + :caption: Deleting a table from the TableServiceClient object + """ + table = self.get_table_client(table_name=table_name) + table.delete_table(**kwargs) + + @distributed_trace + def query_tables(self, query_filter, **kwargs): + # type: (str, Any) -> ItemPaged[TableItem] + """Queries tables under the given account. + + :param str query_filter: Specify a filter to return certain tables. + :keyword int results_per_page: Number of tables per page in the returned ItemPaged + :keyword parameters: Dictionary for formatting query with additional, user defined parameters + :paramtype parameters: Dict[str, Any] + :return: ItemPaged[:class:`~azure.data.tables.TableItem`] + :rtype: ~azure.core.paging.ItemPaged + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_query_tables.py + :start-after: [START tsc_query_tables] + :end-before: [END tsc_query_tables] + :language: python + :dedent: 16 + :caption: Querying tables in a storage account + """ + parameters = kwargs.pop("parameters", None) + query_filter = _parameter_filter_substitution( + parameters, query_filter + ) + top = kwargs.pop("results_per_page", None) + + command = functools.partial(self._client.table.query, **kwargs) + return ItemPaged( + command, + results_per_page=top, + filter=query_filter, + page_iterator_class=TablePropertiesPaged, + ) + + @distributed_trace + def list_tables(self, **kwargs): + # type: (Any) -> ItemPaged[TableItem] + """Lists all tables under the given account. + + :keyword int results_per_page: Number of tables per page in the returned ItemPaged + :return: ItemPaged[:class:`~azure.data.tables.TableItem`] + :rtype: ~azure.core.paging.ItemPaged + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/sample_query_tables.py + :start-after: [START tsc_list_tables] + :end-before: [END tsc_list_tables] + :language: python + :dedent: 16 + :caption: Listing all tables in a storage account + """ + top = kwargs.pop("results_per_page", None) + + command = functools.partial(self._client.table.query, **kwargs) + return ItemPaged( + command, + results_per_page=top, + page_iterator_class=TablePropertiesPaged, + ) + + def get_table_client(self, table_name, **kwargs): + # type: (str, Any) -> TableClient + """Get a client to interact with the specified table. + + The table need not already exist. + + :param str table_name: The table name + :returns: A :class:`~azure.data.tables.TableClient` object. 
+ :rtype: :class:`~azure.data.tables.TableClient` + + """ + pipeline = Pipeline( # type: ignore + transport=TransportWrapper(self._client._client._pipeline._transport), # pylint: disable = protected-access + policies=self._policies + ) + return TableClient( + self.url, + table_name=table_name, + credential=self.credential, + api_version=self.api_version, + pipeline=pipeline, + location_mode=self._location_mode, + _hosts=self._hosts, + **kwargs + ) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_shared_access_signature.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_shared_access_signature.py new file mode 100644 index 000000000000..eec1aac2110b --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_table_shared_access_signature.py @@ -0,0 +1,319 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Union, Any, TYPE_CHECKING + +from ._models import AccountSasPermissions +from ._common_conversion import _sign_string +from ._error import _validate_not_none +from ._constants import X_MS_VERSION +from ._shared_access_signature import ( + _SharedAccessHelper, + SharedAccessSignature, + QueryStringConstants, +) + +if TYPE_CHECKING: + from datetime import datetime + from azure.core.credentials import AzureNamedKeyCredential + from ._models import ResourceTypes + + +def generate_account_sas( + credential, # type: AzureNamedKeyCredential + resource_types, # type: ResourceTypes + permission, # type: Union[str, AccountSasPermissions] + expiry, # type: Union[datetime, str] + **kwargs # type: Any +): + # type: (...) -> str + """ + Generates a shared access signature for the table service. + Use the returned signature with the sas_token parameter of TableService. + + :param credential: Credential for the Azure account + :type credential: :class:`~azure.core.credentials.AzureNamedKeyCredential` + :param resource_types: + Specifies the resource types that are accessible with the account SAS. + :type resource_types: ResourceTypes + :param permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :type permission: str or AccountSasPermissions + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: datetime or str + :keyword start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. 
If a date is passed in without timezone info, it is assumed to + be UTC. + :paramtype start: datetime or str + :keyword str ip_address_or_range: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :keyword protocol: + Specifies the protocol permitted for a request made. + :paramtype protocol: str or SASProtocol + :return: A Shared Access Signature (sas) token. + :rtype: str + """ + _validate_not_none("account_name", credential.named_key.name) + _validate_not_none("account_key", credential.named_key.key) + if isinstance(permission, str): + permission = AccountSasPermissions.from_string(permission=permission) # type: ignore + sas = TableSharedAccessSignature(credential) + return sas.generate_account( + "t", + resource_types, + permission, + expiry, + start=kwargs.pop("start", None), + ip_address_or_range=kwargs.pop("ip_address_or_range", None), + protocol=kwargs.pop("protocol", None), + ) + + +def generate_table_sas(credential, table_name, **kwargs): + # type: (AzureNamedKeyCredential, str, **Any) -> str + """ + Generates a shared access signature for the table service. + Use the returned signature with the sas_token parameter of TableService. + + + :param credential: Credential used for creating Shared Access Signature + :type credential: :class:`~azure.core.credentials.AzureNamedKeyCredential` + :param table_name: Table name + :type table_name: str + :keyword TableSasPermissions permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :keyword expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :paramtype expiry: datetime or str + :keyword start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :paramtype start: datetime or str + :keyword str ip_address_or_range: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. + For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :keyword str policy_id: Access policy ID. + :keyword protocol: + Specifies the protocol permitted for a request made. 
+ :paramtype protocol: str or SASProtocol + :keyword str end_rk: End row key + :keyword str end_pk: End partition key + :keyword str start_rk: Starting row key + :keyword str start_pk: Starting partition key + :return: A Shared Access Signature (sas) token. + :rtype: str + """ + + sas = TableSharedAccessSignature(credential) + return sas.generate_table( + table_name=table_name, + permission=kwargs.pop("permission", None), + expiry=kwargs.pop("expiry", None), + start=kwargs.pop("start", None), + policy_id=kwargs.pop("policy_id", None), + ip_address_or_range=kwargs.pop("ip_address_or_range", None), + protocol=kwargs.pop("protocol", None), + start_pk=kwargs.pop("start_pk", None), + start_rk=kwargs.pop("start_rk", None), + end_pk=kwargs.pop("end_pk", None), + end_rk=kwargs.pop("end_rk", None), + **kwargs + ) # type: ignore + + +class TableSharedAccessSignature(SharedAccessSignature): + """ + Provides a factory for creating table access + signature tokens with a common account name and account key. Users can either + use the factory or can construct the appropriate service and use the + generate_*_shared_access_signature method directly. + """ + + def __init__(self, credential): + """ + :param credential: The credential used for authenticating requests + :type credential: :class:`~azure.core.credentials.AzureNamedKeyCredential` + """ + super(TableSharedAccessSignature, self).__init__( + credential, x_ms_version=X_MS_VERSION + ) + + def generate_table( + self, + table_name, + permission=None, + expiry=None, + start=None, + policy_id=None, + ip_address_or_range=None, + protocol=None, + start_pk=None, + start_rk=None, + end_pk=None, + end_rk=None, + **kwargs # pylint: disable=unused-argument + ): + """ + Generates a shared access signature for the table. + Use the returned signature with the sas_token parameter of TableService. + + :param str table_name: + Name of table. + :param TablePermissions permission: + The permissions associated with the shared access signature. The + user is restricted to operations allowed by the permissions. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has been + specified in an associated stored access policy. + :param expiry: + The time at which the shared access signature becomes invalid. + Required unless an id is given referencing a stored access policy + which contains this field. This field must be omitted if it has + been specified in an associated stored access policy. Azure will always + convert values to UTC. If a date is passed in without timezone info, it + is assumed to be UTC. + :type expiry: datetime or str + :param start: + The time at which the shared access signature becomes valid. If + omitted, start time for this call is assumed to be the time when the + storage service receives the request. Azure will always convert values + to UTC. If a date is passed in without timezone info, it is assumed to + be UTC. + :type start: datetime or str + :param str policy_id: + A unique value up to 64 characters in length that correlates to a + stored access policy. To create a stored access policy, use + set_table_service_properties. + :param str ip_address_or_range: + Specifies an IP address or a range of IP addresses from which to accept requests. + If the IP address from which the request originates does not match the IP address + or address range specified on the SAS token, the request is not authenticated. 
+ For example, specifying sip=168.1.5.65 or sip=168.1.5.60-168.1.5.70 on the SAS + restricts the request to those IP addresses. + :param str protocol: + Specifies the protocol permitted for a request made. The default value + is https,http. See :class:`~azure.cosmosdb.table.common.models.Protocol` for possible values. + :param str start_pk: + The minimum partition key accessible with this shared access + signature. startpk must accompany startrk. Key values are inclusive. + If omitted, there is no lower bound on the table entities that can + be accessed. + :param str start_rk: + The minimum row key accessible with this shared access signature. + startpk must accompany startrk. Key values are inclusive. If + omitted, there is no lower bound on the table entities that can be + accessed. + :param str end_pk: + The maximum partition key accessible with this shared access + signature. endpk must accompany endrk. Key values are inclusive. If + omitted, there is no upper bound on the table entities that can be + accessed. + :param str end_rk: + The maximum row key accessible with this shared access signature. + endpk must accompany endrk. Key values are inclusive. If omitted, + there is no upper bound on the table entities that can be accessed. + """ + sas = _TableSharedAccessHelper() + sas.add_base( + permission, expiry, start, ip_address_or_range, protocol, X_MS_VERSION + ) + sas.add_id(policy_id) + sas.add_table_access_ranges(table_name, start_pk, start_rk, end_pk, end_rk) + + # Table names must be signed lower case + resource_path = table_name.lower() + sas.add_resource_signature( + self.account_name, self.account_key, "table", resource_path + ) + + return sas.get_token() + + +class _TableQueryStringConstants(QueryStringConstants): + TABLE_NAME = "tn" + + +class _TableSharedAccessHelper(_SharedAccessHelper): + def __init__(self): + super(_TableSharedAccessHelper, self).__init__() + self.query_dict = {} + + def add_table_access_ranges(self, table_name, start_pk, start_rk, end_pk, end_rk): + self._add_query(_TableQueryStringConstants.TABLE_NAME, table_name) + self._add_query(_TableQueryStringConstants.START_PK, start_pk) + self._add_query(_TableQueryStringConstants.START_RK, start_rk) + self._add_query(_TableQueryStringConstants.END_PK, end_pk) + self._add_query(_TableQueryStringConstants.END_RK, end_rk) + + def add_resource_signature(self, account_name, account_key, service, path): + def get_value_to_append(query): + return_value = self.query_dict.get(query) or "" + return return_value + "\n" + + if path[0] != "/": + path = "/" + path + + canonicalized_resource = "/" + service + "/" + account_name + path + "\n" + + # Form the string to sign from shared_access_policy and canonicalized + # resource. The order of values is important. 
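+ # (For reference, a sketch of the resulting layout: the newline-separated + # fields below are sp, st, se, the canonicalized resource, si, sip, spr and + # sv, followed by the table range keys spk, srk, epk and erk; a field that + # was never set still contributes its empty line to the string.) 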
+ string_to_sign = ( + get_value_to_append(QueryStringConstants.SIGNED_PERMISSION) + + get_value_to_append(QueryStringConstants.SIGNED_START) + + get_value_to_append(QueryStringConstants.SIGNED_EXPIRY) + + canonicalized_resource + + get_value_to_append(QueryStringConstants.SIGNED_IDENTIFIER) + + get_value_to_append(QueryStringConstants.SIGNED_IP) + + get_value_to_append(QueryStringConstants.SIGNED_PROTOCOL) + + get_value_to_append(QueryStringConstants.SIGNED_VERSION) + ) + + string_to_sign += ( + get_value_to_append(QueryStringConstants.START_PK) + + get_value_to_append(QueryStringConstants.START_RK) + + get_value_to_append(QueryStringConstants.END_PK) + + get_value_to_append(QueryStringConstants.END_RK) + ) + + # remove the trailing newline + if string_to_sign[-1] == "\n": + string_to_sign = string_to_sign[:-1] + + self._add_query( + QueryStringConstants.SIGNED_SIGNATURE, + _sign_string(account_key, string_to_sign), + ) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_version.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_version.py new file mode 100644 index 000000000000..1aff1291cc52 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/_version.py @@ -0,0 +1,7 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +VERSION = "12.1.0" diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/__init__.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/__init__.py new file mode 100644 index 000000000000..7b3856f8848b --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/__init__.py @@ -0,0 +1,13 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from ._table_client_async import TableClient +from ._table_service_client_async import TableServiceClient + +__all__ = [ + "TableClient", + "TableServiceClient", +] diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_base_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_base_client_async.py new file mode 100644 index 000000000000..06b3136394d1 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_base_client_async.py @@ -0,0 +1,181 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- + +from typing import Any, List, Mapping, Optional, Union, TYPE_CHECKING +from uuid import uuid4 + +from azure.core.credentials import AzureSasCredential, AzureNamedKeyCredential +from azure.core.pipeline.policies import ( + ContentDecodePolicy, + AsyncBearerTokenCredentialPolicy, + AsyncRedirectPolicy, + DistributedTracingPolicy, + HttpLoggingPolicy, + UserAgentPolicy, + ProxyPolicy, + AzureSasCredentialPolicy, + RequestIdPolicy, + CustomHookPolicy, + NetworkTraceLoggingPolicy, +) +from azure.core.pipeline.transport import ( + AsyncHttpTransport, + HttpRequest, +) + +from .._generated.aio import AzureTable +from .._base_client import AccountHostsMixin, get_api_version, extract_batch_part_metadata +from .._authentication import SharedKeyCredentialPolicy +from .._constants import STORAGE_OAUTH_SCOPE +from .._error import RequestTooLargeError, TableTransactionError, _decode_error +from .._policies import StorageHosts, StorageHeadersPolicy +from .._sdk_moniker import SDK_MONIKER +from ._policies_async import AsyncTablesRetryPolicy + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class AsyncTablesBaseClient(AccountHostsMixin): + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, + endpoint: str, + *, + credential: Optional[Union[AzureSasCredential, AzureNamedKeyCredential, "AsyncTokenCredential"]] = None, + **kwargs: Any + ) -> None: + super(AsyncTablesBaseClient, self).__init__(endpoint, credential=credential, **kwargs) # type: ignore + self._client = AzureTable( + self.url, + policies=kwargs.pop('policies', self._policies), + **kwargs + ) + self._client._config.version = get_api_version(kwargs, self._client._config.version) # pylint: disable=protected-access + + + async def __aenter__(self): + await self._client.__aenter__() + return self + + async def __aexit__(self, *args): + await self._client.__aexit__(*args) + + async def close(self) -> None: + """This method is to close the sockets opened by the client. + It is not necessary to call this method when the client is used as a context manager. 
+ """ + await self._client.close() + + def _configure_credential(self, credential): + # type: (Any) -> None + if hasattr(credential, "get_token"): + self._credential_policy = AsyncBearerTokenCredentialPolicy( # type: ignore + credential, STORAGE_OAUTH_SCOPE + ) + elif isinstance(credential, SharedKeyCredentialPolicy): + self._credential_policy = credential # type: ignore + elif isinstance(credential, AzureSasCredential): + self._credential_policy = AzureSasCredentialPolicy(credential) # type: ignore + elif isinstance(credential, AzureNamedKeyCredential): + self._credential_policy = SharedKeyCredentialPolicy(credential) # type: ignore + elif credential is not None: + raise TypeError("Unsupported credential: {}".format(credential)) + + def _configure_policies(self, **kwargs): + return [ + RequestIdPolicy(**kwargs), + StorageHeadersPolicy(**kwargs), + UserAgentPolicy(sdk_moniker=SDK_MONIKER, **kwargs), + ProxyPolicy(**kwargs), + self._credential_policy, + ContentDecodePolicy(response_encoding="utf-8"), + AsyncRedirectPolicy(**kwargs), + StorageHosts(**kwargs), + AsyncTablesRetryPolicy(**kwargs), + CustomHookPolicy(**kwargs), + NetworkTraceLoggingPolicy(**kwargs), + DistributedTracingPolicy(**kwargs), + HttpLoggingPolicy(**kwargs), + ] + + async def _batch_send(self, *reqs: "HttpRequest", **kwargs) -> List[Mapping[str, Any]]: + """Given a series of request, do a Storage batch call.""" + # Pop it here, so requests doesn't feel bad about additional kwarg + policies = [StorageHeadersPolicy()] + + changeset = HttpRequest("POST", None) # type: ignore + changeset.set_multipart_mixed( + *reqs, policies=policies, boundary="changeset_{}".format(uuid4()) + ) + request = self._client._client.post( # pylint: disable=protected-access + url="https://{}/$batch".format(self._primary_hostname), + headers={ + "x-ms-version": self.api_version, + "DataServiceVersion": "3.0", + "MaxDataServiceVersion": "3.0;NetFx", + "Content-Type": "application/json", + "Accept": "application/json" + }, + ) + request.set_multipart_mixed( + changeset, + policies=policies, + enforce_https=False, + boundary="batch_{}".format(uuid4()), + ) + + pipeline_response = await self._client._client._pipeline.run(request, **kwargs) # pylint: disable=protected-access + response = pipeline_response.http_response + # TODO: Check for proper error model deserialization + if response.status_code == 413: + raise _decode_error( + response, + error_message="The transaction request was too large", + error_type=RequestTooLargeError) + if response.status_code != 202: + raise _decode_error(response) + + parts_iter = response.parts() + parts = [] + async for p in parts_iter: + parts.append(p) + error_parts = [p for p in parts if not 200 <= p.status_code < 300] + if any(error_parts): + if error_parts[0].status_code == 413: + raise _decode_error( + response, + error_message="The transaction request was too large", + error_type=RequestTooLargeError) + raise _decode_error( + response=error_parts[0], + error_type=TableTransactionError, + ) + return [extract_batch_part_metadata(p) for p in parts] + + +class AsyncTransportWrapper(AsyncHttpTransport): + """Wrapper class that ensures that an inner client created + by a `get_client` method does not close the outer transport for the parent + when used in a context manager. 
+ """ + def __init__(self, async_transport): + self._transport = async_transport + + async def send(self, request, **kwargs): + return await self._transport.send(request, **kwargs) + + async def open(self): + pass + + async def close(self): + pass + + async def __aenter__(self): + pass + + async def __aexit__(self, *args): # pylint: disable=arguments-differ + pass diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_models.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_models.py new file mode 100644 index 000000000000..1a27a24ddea4 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_models.py @@ -0,0 +1,116 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from azure.core.exceptions import HttpResponseError +from azure.core.async_paging import AsyncPageIterator + +from .._deserialize import ( + _return_context_and_deserialized, + _convert_to_entity, + _extract_continuation_token, +) +from .._generated.models import QueryOptions +from .._models import TableItem +from .._error import _process_table_error +from .._constants import NEXT_PARTITION_KEY, NEXT_TABLE_NAME, NEXT_ROW_KEY + + +class TablePropertiesPaged(AsyncPageIterator): + """An iterable of Table properties. + + :param callable command: Function to retrieve the next page of items. + :keyword int results_per_page: The maximum number of results retrieved per API call. + :keyword str filter: The filter to apply to results. + :keyword str continuation_token: An opaque continuation token. + """ + + def __init__(self, command, **kwargs): + super(TablePropertiesPaged, self).__init__( + self._get_next_cb, + self._extract_data_cb, + continuation_token=kwargs.get("continuation_token") or "", + ) + self._command = command + self._headers = None + self._response = None + self.results_per_page = kwargs.get("results_per_page") + self.filter = kwargs.get("filter") + self._location_mode = None + + async def _get_next_cb(self, continuation_token, **kwargs): + query_options = QueryOptions(top=self.results_per_page, filter=self.filter) + try: + return await self._command( + query_options=query_options, + next_table_name=continuation_token or None, + cls=kwargs.pop("cls", None) or _return_context_and_deserialized, + use_location=self._location_mode, + ) + except HttpResponseError as error: + _process_table_error(error) + + async def _extract_data_cb(self, get_next_return): + self._location_mode, self._response, self._headers = get_next_return + props_list = [ + TableItem._from_generated(t, **self._headers) for t in self._response.value # pylint: disable=protected-access + ] + return self._headers[NEXT_TABLE_NAME] or None, props_list + + +class TableEntityPropertiesPaged(AsyncPageIterator): + """An iterable of TableEntity properties. + + :param callable command: Function to retrieve the next page of items. + :param str table: The name of the table. + :keyword int results_per_page: The maximum number of results retrieved per API call. + :keyword str filter: The filter to apply to results. 
+ :keyword str select: The select filter to apply to results. + :keyword str continuation_token: An opaque continuation token. + """ + + def __init__(self, command, table, **kwargs): + super(TableEntityPropertiesPaged, self).__init__( + self._get_next_cb, + self._extract_data_cb, + continuation_token=kwargs.get("continuation_token") or {}, + ) + self._command = command + self._headers = None + self._response = None + self.table = table + self.results_per_page = kwargs.get("results_per_page") + self.filter = kwargs.get("filter") + self.select = kwargs.get("select") + self._location_mode = None + + async def _get_next_cb(self, continuation_token, **kwargs): + next_partition_key, next_row_key = _extract_continuation_token( + continuation_token + ) + query_options = QueryOptions( + top=self.results_per_page, select=self.select, filter=self.filter + ) + try: + return await self._command( + query_options=query_options, + next_row_key=next_row_key, + next_partition_key=next_partition_key, + table=self.table, + cls=kwargs.pop("cls", _return_context_and_deserialized), + use_location=self._location_mode, + ) + except HttpResponseError as error: + _process_table_error(error) + + async def _extract_data_cb(self, get_next_return): + self._location_mode, self._response, self._headers = get_next_return + props_list = [_convert_to_entity(t) for t in self._response.value] + next_entity = {} + if self._headers[NEXT_PARTITION_KEY] or self._headers[NEXT_ROW_KEY]: + next_entity = { + "PartitionKey": self._headers[NEXT_PARTITION_KEY], + "RowKey": self._headers[NEXT_ROW_KEY], + } + return next_entity or None, props_list diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_policies_async.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_policies_async.py new file mode 100644 index 000000000000..96139f7c5b4e --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_policies_async.py @@ -0,0 +1,155 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import time + +from azure.core.pipeline.policies import AsyncRetryPolicy +from azure.core.exceptions import ( + AzureError, + ClientAuthenticationError, + ServiceRequestError +) + +from .._models import LocationMode +from .._policies import set_next_host_location + + +class AsyncTablesRetryPolicy(AsyncRetryPolicy): + """A retry policy. + + The retry policy in the pipeline can be configured directly, or tweaked on a per-call basis. + + :keyword bool retry_to_secondary: Whether to allow retrying to the secondary fail-over host + location. Default value is False. + + :keyword int retry_total: Total number of retries to allow. Takes precedence over other counts. + Default value is 10. + + :keyword int retry_connect: How many connection-related errors to retry on. + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. Default value is 3. + + :keyword int retry_read: How many times to retry on read errors. 
These errors are raised after the request was sent to the server, so the + request may have side-effects. Default value is 3. + + :keyword int retry_status: How many times to retry on bad status codes. Default value is 3. + + :keyword float retry_backoff_factor: A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a delay). + In fixed mode, retry policy will always sleep for {backoff factor}. + In 'exponential' mode, retry policy will sleep for: `{backoff factor} * (2 ** ({number of total retries} - 1))` + seconds. If the backoff_factor is 0.1, then the retry will sleep + for [0.0s, 0.2s, 0.4s, ...] between retries. The default value is 0.8. + + :keyword int retry_backoff_max: The maximum back off time. Default value is 120 seconds (2 minutes). + + :keyword RetryMode retry_mode: Fixed or exponential delay between attempts, default is exponential. + + :keyword int timeout: Timeout setting for the operation in seconds, default is 604800s (7 days). + """ + + def __init__(self, **kwargs): + super(AsyncTablesRetryPolicy, self).__init__(**kwargs) + self.retry_to_secondary = kwargs.get('retry_to_secondary', False) + + def is_retry(self, settings, response): + """Is this method/status code retryable? (Based on whitelists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + should_retry = super(AsyncTablesRetryPolicy, self).is_retry(settings, response) + status = response.http_response.status_code + if status == 404 and settings['mode'] == LocationMode.SECONDARY: + # Response code 404 should be retried if secondary was used. + return True + return should_retry + + def configure_retries(self, options): + """Configures the retry settings. + + :param options: keyword arguments from context. + :return: A dict containing settings and history for retries. + :rtype: dict + """ + config = super(AsyncTablesRetryPolicy, self).configure_retries(options) + config["retry_secondary"] = options.pop("retry_to_secondary", self.retry_to_secondary) + config["mode"] = options.pop("location_mode", LocationMode.PRIMARY) + config["hosts"] = options.pop("hosts", None) + return config + + def update_context(self, context, retry_settings): + """Updates retry history in pipeline context. + + :param context: The pipeline context. + :type context: ~azure.core.pipeline.PipelineContext + :param retry_settings: The retry settings. + :type retry_settings: dict + """ + super(AsyncTablesRetryPolicy, self).update_context(context, retry_settings) + context['location_mode'] = retry_settings['mode'] + + def update_request(self, request, retry_settings): # pylint: disable=no-self-use + """Updates the pipeline request before attempting to retry. + + :param PipelineRequest request: The outgoing request. + :param dict(str, Any) retry_settings: The current retry context settings. + """ + set_next_host_location(retry_settings, request) + + async def send(self, request): + """Uses the configured retry policy to send the request to the next policy in the pipeline. + + :param request: The PipelineRequest object + :type request: ~azure.core.pipeline.PipelineRequest + :return: Returns the PipelineResponse or raises error if maximum retries exceeded. 
+ :rtype: ~azure.core.pipeline.PipelineResponse + :raise: ~azure.core.exceptions.AzureError if maximum retries exceeded. + :raise: ~azure.core.exceptions.ClientAuthenticationError if authentication fails + """ + retry_active = True + response = None + retry_settings = self.configure_retries(request.context.options) + absolute_timeout = retry_settings['timeout'] + is_response_error = True + + while retry_active: + try: + start_time = time.time() + self._configure_timeout(request, absolute_timeout, is_response_error) + response = await self.next.send(request) + if self.is_retry(retry_settings, response): + retry_active = self.increment(retry_settings, response=response) + if retry_active: + self.update_request(request, retry_settings) + await self.sleep(retry_settings, request.context.transport, response=response) + is_response_error = True + continue + break + except ClientAuthenticationError: # pylint:disable=try-except-raise + # the authentication policy failed such that the client's request can't + # succeed--we'll never have a response to it, so propagate the exception + raise + except AzureError as err: + if self._is_method_retryable(retry_settings, request.http_request): + retry_active = self.increment(retry_settings, response=request, error=err) + if retry_active: + self.update_request(request, retry_settings) + await self.sleep(retry_settings, request.context.transport) + if isinstance(err, ServiceRequestError): + is_response_error = False + else: + is_response_error = True + continue + raise err + finally: + end_time = time.time() + if absolute_timeout: + absolute_timeout -= (end_time - start_time) + + self.update_context(response.context, retry_settings) + return response diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_batch_async.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_batch_async.py new file mode 100644 index 000000000000..4c8e590cdafa --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_batch_async.py @@ -0,0 +1,649 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +from typing import Dict, Any, Optional, Union, TYPE_CHECKING +import msrest + +from azure.core import MatchConditions + +from .._common_conversion import _transform_patch_to_cosmos_post +from .._models import UpdateMode +from .._entity import TableEntity +from .._table_batch import EntityType +from .._serialize import ( + _get_match_headers, + _add_entity_properties, +) + +from .._generated.aio._azure_table import AzureTable +from .._generated.aio._configuration import AzureTableConfiguration + +if TYPE_CHECKING: + from .._generated import models + + +class TableBatchOperations(object): + """ + This is the class that is used for batch operations for the data tables + service. + + The Tables service supports batch transactions on entities that are in the + same table and belong to the same partition group. Multiple operations are + supported within a single transaction. The batch can include at most 100 + entities, and its total payload may be no more than 4 MB in size. 
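+ + A rough usage sketch (names are illustrative; the queued requests are committed through the client's ``submit_transaction``): + + .. code-block:: python + + batch = TableBatchOperations(client, serializer, deserializer, config, "mytable") + batch.create({"PartitionKey": "pk", "RowKey": "rk-1", "value": 1}) + batch.update({"PartitionKey": "pk", "RowKey": "rk-2", "value": 2}) + assert len(batch) == 2 # two queued operations, sent as one transaction 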
+ + """ + + def __init__( + self, + client: AzureTable, + serializer: msrest.Serializer, + deserializer: msrest.Deserializer, + config: AzureTableConfiguration, + table_name: str, + is_cosmos_endpoint: bool = False, + **kwargs: Dict[str, Any] + ) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + self._is_cosmos_endpoint = is_cosmos_endpoint + self.table_name = table_name + + self._partition_key = kwargs.pop("partition_key", None) + self.requests = [] # type: ignore + + def __len__(self): + return len(self.requests) + + def _verify_partition_key( + self, entity: EntityType + ) -> None: + if self._partition_key is None: + self._partition_key = entity["PartitionKey"] + elif entity["PartitionKey"] != self._partition_key: + raise ValueError("Partition Keys must all be the same") + + def create( + self, + entity: EntityType, + **kwargs + ) -> None: + """Insert entity in a table. + + :param entity: The properties for the table entity. + :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str] + :return: None + :rtype: None + :raises ValueError: + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_batching.py + :start-after: [START batching] + :end-before: [END batching] + :language: python + :dedent: 8 + :caption: Creating and adding an entity to a Table + """ + self._verify_partition_key(entity) + temp = entity.copy() # type: ignore + + if "PartitionKey" in temp and "RowKey" in temp: + temp = _add_entity_properties(temp) + else: + raise ValueError("PartitionKey and/or RowKey were not provided in entity") + self._batch_create_entity(table=self.table_name, entity=temp, **kwargs) + + def _batch_create_entity( + self, + table: str, + entity: EntityType, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + response_preference: Optional[Union[str, "models.ResponseFormat"]] = "return-no-content", + query_options: Optional["models.QueryOptions"] = None, + **kwargs: Any + ) -> None: + """ + Adds an insert operation to the batch. See + :func:`azure.data.tables.TableClient.insert_entity` for more information + on insert operations. + + The operation will not be executed until the batch is committed + + :param: table: + The table to perform the operation on + :type: table: str + :param: entity: + The entity to insert. Can be a dict or an entity object + Must contain a PartitionKey and a RowKey. 
:type entity: dict or :class:`~azure.data.tables.models.Entity` + """ + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json;odata=nometadata") + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self._batch_create_entity.metadata["url"] # type: ignore + path_format_arguments = { + "url": self._serialize.url( + "self._config.url", self._config.url, "str", skip_quote=True + ), + "table": self._serialize.url("table", table, "str"), + } + url = self._client._client.format_url( # pylint: disable=protected-access + url, **path_format_arguments + ) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters["timeout"] = self._serialize.query( + "timeout", timeout, "int", minimum=0 + ) + if _format is not None: + query_parameters["$format"] = self._serialize.query( + "format", _format, "str" + ) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters["x-ms-version"] = self._serialize.header( + "self._config.version", self._config.version, "str" + ) + if request_id_parameter is not None: + header_parameters["x-ms-client-request-id"] = self._serialize.header( + "request_id_parameter", request_id_parameter, "str" + ) + header_parameters["DataServiceVersion"] = self._serialize.header( + "data_service_version", data_service_version, "str" + ) + if response_preference is not None: + header_parameters["Prefer"] = self._serialize.header( + "response_preference", response_preference, "str" + ) + header_parameters["Content-Type"] = self._serialize.header( + "content_type", content_type, "str" + ) + header_parameters["Accept"] = self._serialize.header("accept", accept, "str") + + body_content_kwargs = {} # type: Dict[str, Any] + if entity is not None: + body_content = self._serialize.body(entity, "{object}") + else: + body_content = None + body_content_kwargs["content"] = body_content + request = self._client._client.post( # pylint: disable=protected-access + url, query_parameters, header_parameters, **body_content_kwargs + ) + self.requests.append(request) + + _batch_create_entity.metadata = {"url": "/{table}"} # type: ignore + + def update( + self, + entity: EntityType, + mode: Union[str, UpdateMode] = UpdateMode.MERGE, + **kwargs: Any + ) -> None: + """Adds an update operation to the current batch. + + :param entity: The properties for the table entity. + :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str] + :param mode: Merge or Replace entity + :type mode: :class:`~azure.data.tables.UpdateMode` + :keyword str etag: Etag of the entity + :keyword match_condition: MatchCondition + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises ValueError: + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/async_samples/sample_batching_async.py + :start-after: [START batching] + :end-before: [END batching] + :language: python + :dedent: 8 + :caption: Creating and adding an entity to a Table + """ + self._verify_partition_key(entity) + temp = entity.copy() # type: ignore + + match_condition = kwargs.pop("match_condition", None) + etag = kwargs.pop("etag", None) + if match_condition and not etag: + try: + etag = entity.metadata.get("etag", None) # type: ignore + except (AttributeError, TypeError): + pass + if_match = _get_match_headers( + etag=etag, + match_condition=match_condition or MatchConditions.Unconditionally, + ) + + partition_key = temp["PartitionKey"] + row_key = temp["RowKey"] + temp = _add_entity_properties(temp) + if mode is UpdateMode.REPLACE: + self._batch_update_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + if_match=if_match, + table_entity_properties=temp, + **kwargs + ) + elif mode is UpdateMode.MERGE: + self._batch_merge_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + if_match=if_match, + table_entity_properties=temp, + **kwargs + ) + else: + raise ValueError("Mode type is not supported") + + def _batch_update_entity( + self, + table: str, + partition_key: str, + row_key: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + if_match: Optional[str] = None, + table_entity_properties: Optional[EntityType] = None, + query_options: Optional["models.QueryOptions"] = None, + **kwargs: Any + ) -> None: + """Update entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param if_match: Match condition for an entity to be updated. If specified and a matching + entity is not found, an error will be raised. To force an unconditional update, set to the + wildcard character (*). If not specified, an insert will be performed when no existing entity + is found to update and a replace will be performed if an existing entity is found. + :type if_match: str + :param table_entity_properties: The properties for the table entity. + :type table_entity_properties: dict[str, object] + :param query_options: Parameter group. 
+ :type query_options: ~azure.data.tables.models.QueryOptions + """ + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self._batch_update_entity.metadata["url"] # type: ignore + path_format_arguments = { + "url": self._serialize.url( + "self._config.url", self._config.url, "str", skip_quote=True + ), + "table": self._serialize.url("table", table, "str"), + "partitionKey": self._serialize.url("partition_key", partition_key, "str"), + "rowKey": self._serialize.url("row_key", row_key, "str"), + } + url = self._client._client.format_url( # pylint: disable=protected-access + url, **path_format_arguments + ) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters["timeout"] = self._serialize.query( + "timeout", timeout, "int", minimum=0 + ) + if _format is not None: + query_parameters["$format"] = self._serialize.query( + "format", _format, "str" + ) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters["x-ms-version"] = self._serialize.header( + "self._config.version", self._config.version, "str" + ) + if request_id_parameter is not None: + header_parameters["x-ms-client-request-id"] = self._serialize.header( + "request_id_parameter", request_id_parameter, "str" + ) + header_parameters["DataServiceVersion"] = self._serialize.header( + "data_service_version", data_service_version, "str" + ) + if if_match is not None: + header_parameters["If-Match"] = self._serialize.header( + "if_match", if_match, "str" + ) + header_parameters["Content-Type"] = self._serialize.header( + "content_type", content_type, "str" + ) + header_parameters["Accept"] = self._serialize.header("accept", accept, "str") + + body_content_kwargs = {} # type: Dict[str, Any] + if table_entity_properties is not None: + body_content = self._serialize.body(table_entity_properties, "{object}") + else: + body_content = None + body_content_kwargs["content"] = body_content + request = self._client._client.put( # pylint: disable=protected-access + url, query_parameters, header_parameters, **body_content_kwargs + ) + self.requests.append(request) + + _batch_update_entity.metadata = { # type: ignore + "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')" + } + + def _batch_merge_entity( + self, + table: str, + partition_key: str, + row_key: str, + timeout: Optional[int] = None, + request_id_parameter: Optional[str] = None, + if_match: Optional[str] = None, + table_entity_properties: Optional[EntityType] = None, + query_options: Optional["models.QueryOptions"] = None, + **kwargs + ) -> None: + """Merge entity in a table. + + :param table: The name of the table. + :type table: str + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :param timeout: The timeout parameter is expressed in seconds. + :type timeout: int + :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character + limit that is recorded in the analytics logs when analytics logging is enabled. + :type request_id_parameter: str + :param if_match: Match condition for an entity to be updated. If specified and a matching + entity is not found, an error will be raised. To force an unconditional update, set to the + wildcard character (*). 
If not specified, an insert will be performed when no existing entity
+         is found to update and a merge will be performed if an existing entity is found.
+        :type if_match: str
+        :param table_entity_properties: The properties for the table entity.
+        :type table_entity_properties: dict[str, object]
+        :param query_options: Parameter group.
+        :type query_options: ~azure.data.tables.models.QueryOptions
+        """
+
+        _format = None
+        if query_options is not None:
+            _format = query_options.format
+        data_service_version = "3.0"
+        content_type = kwargs.pop("content_type", "application/json")
+        accept = "application/json"
+
+        # Construct URL
+        url = self._batch_merge_entity.metadata["url"]  # type: ignore
+        path_format_arguments = {
+            "url": self._serialize.url(
+                "self._config.url", self._config.url, "str", skip_quote=True
+            ),
+            "table": self._serialize.url("table", table, "str"),
+            "partitionKey": self._serialize.url("partition_key", partition_key, "str"),
+            "rowKey": self._serialize.url("row_key", row_key, "str"),
+        }
+        url = self._client._client.format_url(  # pylint: disable=protected-access
+            url, **path_format_arguments
+        )
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        if timeout is not None:
+            query_parameters["timeout"] = self._serialize.query(
+                "timeout", timeout, "int", minimum=0
+            )
+        if _format is not None:
+            query_parameters["$format"] = self._serialize.query(
+                "format", _format, "str"
+            )
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters["x-ms-version"] = self._serialize.header(
+            "self._config.version", self._config.version, "str"
+        )
+        if request_id_parameter is not None:
+            header_parameters["x-ms-client-request-id"] = self._serialize.header(
+                "request_id_parameter", request_id_parameter, "str"
+            )
+        header_parameters["DataServiceVersion"] = self._serialize.header(
+            "data_service_version", data_service_version, "str"
+        )
+        if if_match is not None:
+            header_parameters["If-Match"] = self._serialize.header(
+                "if_match", if_match, "str"
+            )
+        header_parameters["Content-Type"] = self._serialize.header(
+            "content_type", content_type, "str"
+        )
+        header_parameters["Accept"] = self._serialize.header("accept", accept, "str")
+
+        body_content_kwargs = {}  # type: Dict[str, Any]
+        if table_entity_properties is not None:
+            body_content = self._serialize.body(table_entity_properties, "{object}")
+        else:
+            body_content = None
+        body_content_kwargs["content"] = body_content
+        request = self._client._client.patch(  # pylint: disable=protected-access
+            url, query_parameters, header_parameters, **body_content_kwargs
+        )
+        if self._is_cosmos_endpoint:
+            _transform_patch_to_cosmos_post(request)
+        self.requests.append(request)
+
+    _batch_merge_entity.metadata = {  # type: ignore
+        "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')"
+    }
+
+    def delete(
+        self,
+        entity: EntityType,
+        **kwargs
+    ) -> None:
+        """Adds a delete operation for the specified entity to the current batch.
+
+        :param entity: The entity to delete. Must contain a PartitionKey and a RowKey.
+        :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str]
+        :keyword str etag: Etag of the entity
+        :keyword match_condition: The condition under which to perform the operation.
+        :paramtype match_condition: ~azure.core.MatchConditions
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/async_samples/sample_batching_async.py
+                :start-after: [START batching]
+                :end-before: [END batching]
+                :language: python
+                :dedent: 8
+                :caption: Deleting an entity as part of a batch
+        """
+        self._verify_partition_key(entity)
+        temp = entity.copy()  # type: ignore
+        partition_key = temp["PartitionKey"]
+        row_key = temp["RowKey"]
+
+        match_condition = kwargs.pop("match_condition", None)
+        etag = kwargs.pop("etag", None)
+        if match_condition and not etag:
+            try:
+                etag = entity.metadata.get("etag", None)  # type: ignore
+            except (AttributeError, TypeError):
+                pass
+        if_match = _get_match_headers(
+            etag=etag,
+            match_condition=match_condition or MatchConditions.Unconditionally,
+        )
+
+        self._batch_delete_entity(
+            table=self.table_name,
+            partition_key=partition_key,
+            row_key=row_key,
+            if_match=if_match,
+            **kwargs
+        )
+
+    def _batch_delete_entity(
+        self,
+        table: str,
+        partition_key: str,
+        row_key: str,
+        if_match: str,
+        timeout: Optional[int] = None,
+        request_id_parameter: Optional[str] = None,
+        query_options: Optional["models.QueryOptions"] = None,
+    ) -> None:
+        """Deletes the specified entity in a table.
+
+        :param table: The name of the table.
+        :type table: str
+        :param partition_key: The partition key of the entity.
+        :type partition_key: str
+        :param row_key: The row key of the entity.
+        :type row_key: str
+        :param if_match: Match condition for an entity to be deleted. If specified and a matching
+         entity is not found, an error will be raised. To force an unconditional delete, set to the
+         wildcard character (*).
+        :type if_match: str
+        :param timeout: The timeout parameter is expressed in seconds.
+        :type timeout: int
+        :param request_id_parameter: Provides a client-generated, opaque value with a 1 KB character
+         limit that is recorded in the analytics logs when analytics logging is enabled.
+        :type request_id_parameter: str
+        :param query_options: Parameter group.
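
Each of the methods above only serializes a request and appends it to `self.requests`; nothing reaches the service until the batch is submitted. Below is a minimal sketch of composing and submitting such a transaction, assuming the vendored client mirrors the public azure-data-tables transaction API; the connection string and entity values are placeholders.

```python
from azure.data.tables import TableClient, UpdateMode

client = TableClient.from_connection_string("<conn-str>", table_name="demo")
operations = [
    ("create", {"PartitionKey": "pk", "RowKey": "1", "value": 1}),
    ("upsert", {"PartitionKey": "pk", "RowKey": "2", "value": 2}),
    ("update", {"PartitionKey": "pk", "RowKey": "2", "value": 3}, {"mode": UpdateMode.REPLACE}),
    ("delete", {"PartitionKey": "pk", "RowKey": "1"}),
]
# All operations share one PartitionKey (enforced by _verify_partition_key)
# and succeed or fail as a single changeset.
client.submit_transaction(operations)
```
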
+ :type query_options: ~azure.data.tables.models.QueryOptions + """ + + _format = None + if query_options is not None: + _format = query_options.format + data_service_version = "3.0" + accept = "application/json;odata=minimalmetadata" + + # Construct URL + url = self._batch_delete_entity.metadata["url"] # type: ignore + path_format_arguments = { + "url": self._serialize.url( + "self._config.url", self._config.url, "str", skip_quote=True + ), + "table": self._serialize.url("table", table, "str"), + "partitionKey": self._serialize.url("partition_key", partition_key, "str"), + "rowKey": self._serialize.url("row_key", row_key, "str"), + } + url = self._client._client.format_url( # pylint: disable=protected-access + url, **path_format_arguments + ) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if timeout is not None: + query_parameters["timeout"] = self._serialize.query( + "timeout", timeout, "int", minimum=0 + ) + if _format is not None: + query_parameters["$format"] = self._serialize.query( + "format", _format, "str" + ) + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters["x-ms-version"] = self._serialize.header( + "self._config.version", self._config.version, "str" + ) + if request_id_parameter is not None: + header_parameters["x-ms-client-request-id"] = self._serialize.header( + "request_id_parameter", request_id_parameter, "str" + ) + header_parameters["DataServiceVersion"] = self._serialize.header( + "data_service_version", data_service_version, "str" + ) + header_parameters["If-Match"] = self._serialize.header( + "if_match", if_match, "str" + ) + header_parameters["Accept"] = self._serialize.header("accept", accept, "str") + + request = self._client._client.delete( # pylint: disable=protected-access + url, query_parameters, header_parameters + ) + self.requests.append(request) + + _batch_delete_entity.metadata = { # type: ignore + "url": "/{table}(PartitionKey='{partitionKey}',RowKey='{rowKey}')" + } + + def upsert( + self, + entity: EntityType, + mode: Union[str, UpdateMode] = UpdateMode.MERGE, + **kwargs + ) -> None: + """Update/Merge or Insert entity into table. + + :param entity: The properties for the table entity. + :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str] + :param mode: Merge or Replace entity + :type mode: :class:`~azure.data.tables.UpdateMode` + :return: None + :rtype: None + :raises ValueError: + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/async_samples/sample_batching.py + :start-after: [START batching] + :end-before: [END batching] + :language: python + :dedent: 8 + :caption: Creating and adding an entity to a Table + """ + self._verify_partition_key(entity) + temp = entity.copy() # type: ignore + + partition_key = temp["PartitionKey"] + row_key = temp["RowKey"] + temp = _add_entity_properties(temp) + + if mode is UpdateMode.MERGE: + self._batch_merge_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + table_entity_properties=temp, + **kwargs + ) + elif mode is UpdateMode.REPLACE: + self._batch_update_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + table_entity_properties=temp, + **kwargs + ) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_client_async.py new file mode 100644 index 000000000000..4513b657d9ef --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_client_async.py @@ -0,0 +1,709 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import functools +from typing import List, Union, Any, Optional, Mapping, Iterable, Dict, overload, cast, TYPE_CHECKING +try: + from urllib.parse import urlparse, unquote +except ImportError: + from urlparse import urlparse # type: ignore + from urllib2 import unquote # type: ignore + +from azure.core import MatchConditions +from azure.core.credentials import AzureNamedKeyCredential, AzureSasCredential +from azure.core.async_paging import AsyncItemPaged +from azure.core.exceptions import HttpResponseError +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async + +from .._base_client import parse_connection_str +from .._entity import TableEntity +from .._generated.models import SignedIdentifier, TableProperties, QueryOptions +from .._models import TableAccessPolicy, TableItem +from .._serialize import serialize_iso, _parameter_filter_substitution +from .._deserialize import deserialize_iso, _return_headers_and_deserialized +from .._error import ( + _process_table_error, + _validate_table_name, + _decode_error, + _reraise_error +) +from .._models import UpdateMode +from .._deserialize import _convert_to_entity, _trim_service_metadata +from .._serialize import _add_entity_properties, _get_match_headers +from .._table_client import EntityType, TransactionOperationType +from ._base_client_async import AsyncTablesBaseClient +from ._models import TableEntityPropertiesPaged +from ._table_batch_async import TableBatchOperations + +if TYPE_CHECKING: + from azure.core.credentials_async import AsyncTokenCredential + + +class TableClient(AsyncTablesBaseClient): + """A client to interact with a specific Table in an Azure Tables account. + + :ivar str account_name: The name of the Tables account. + :ivar str table_name: The name of the table. + :ivar str url: The full URL to the Tables account. 
+ """ + + def __init__( # pylint: disable=missing-client-constructor-parameter-credential + self, + endpoint: str, + table_name: str, + *, + credential: Optional[Union[AzureSasCredential, AzureNamedKeyCredential, "AsyncTokenCredential"]] = None, + **kwargs + ) -> None: + """Create TableClient from a Credential. + + :param str endpoint: A URL to an Azure Tables account. + :param str table_name: The table name. + :keyword credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core), + AzureSasCredential (azure-core), or TokenCredentials from azure-identity. + :paramtype credential: + :class:`~azure.core.credentials.AzureNamedKeyCredential` or + :class:`~azure.core.credentials.AzureSasCredential` or + :class:`~azure.core.credentials.TokenCredential` + + :returns: None + """ + if not table_name: + raise ValueError("Please specify a table name.") + _validate_table_name(table_name) + self.table_name = table_name + super(TableClient, self).__init__(endpoint, credential=credential, **kwargs) + + def _format_url(self, hostname): + """Format the endpoint URL according to the current location + mode hostname. + """ + return "{}://{}{}".format(self.scheme, hostname, self._query_str) + + @classmethod + def from_connection_string( + cls, + conn_str: str, + table_name: str, + **kwargs + ) -> 'TableClient': + """Create TableClient from a Connection string. + + :param str conn_str: A connection string to an Azure Tables account. + :param str table_name: The table name. + :returns: A table client. + :rtype: :class:`~azure.data.tables.TableClient` + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_create_client_async.py + :start-after: [START create_table_client] + :end-before: [END create_table_client] + :language: python + :dedent: 8 + :caption: Creating the TableClient from a connection string. + """ + endpoint, credential = parse_connection_str( + conn_str=conn_str, credential=None, keyword_args=kwargs + ) + return cls(endpoint, table_name=table_name, credential=credential, **kwargs) + + @classmethod + def from_table_url( + cls, + table_url: str, + **kwargs + ) -> 'TableClient': + """A client to interact with a specific Table. + + :param str table_url: The full URI to the table, including SAS token if used. + :keyword credential: + The credentials with which to authenticate. This is optional if the + table URL already has a SAS token. The value can be one of AzureNamedKeyCredential + or AzureSasCredential from azure-core. + :paramtype credential: + :class:`~azure.core.credentials.AzureNamedKeyCredential` or + :class:`~azure.core.credentials.AzureSasCredential` + :returns: A table client. 
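
A hedged sketch of the two construction paths just shown, written against the public azure.data.tables.aio package that this vendored module mirrors; the account, table, and SAS values are placeholders.

```python
import asyncio
from azure.core.credentials import AzureSasCredential
from azure.data.tables.aio import TableClient  # public twin of the vendored client

async def main():
    # From an endpoint plus table name, authenticating with a SAS token.
    async with TableClient(
        endpoint="https://<account>.table.core.windows.net",
        table_name="demo",
        credential=AzureSasCredential("<sas-token>"),
    ) as client:
        print(client.table_name)

    # Or from a full table URL that already carries the SAS token.
    client = TableClient.from_table_url(
        "https://<account>.table.core.windows.net/demo?<sas-token>"
    )
    await client.close()

asyncio.run(main())
```
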
+ :rtype: :class:`~azure.data.tables.TableClient` + """ + try: + if not table_url.lower().startswith("http"): + table_url = "https://" + table_url + except AttributeError: + raise ValueError("Table URL must be a string.") + parsed_url = urlparse(table_url.rstrip("/")) + + if not parsed_url.netloc: + raise ValueError("Invalid URL: {}".format(table_url)) + + table_path = parsed_url.path.lstrip("/").split("/") + account_path = "" + if len(table_path) > 1: + account_path = "/" + "/".join(table_path[:-1]) + endpoint = "{}://{}{}?{}".format( + parsed_url.scheme, + parsed_url.netloc.rstrip("/"), + account_path, + parsed_url.query, + ) + table_name = unquote(table_path[-1]) + if table_name.lower().startswith("tables('"): + table_name = table_name[8:-2] + if not table_name: + raise ValueError( + "Invalid URL. Please provide a URL with a valid table name" + ) + return cls(endpoint, table_name=table_name, **kwargs) + + @distributed_trace_async + async def get_table_access_policy(self, **kwargs) -> Mapping[str, Optional[TableAccessPolicy]]: + """ + Retrieves details about any stored access policies specified on the table that may be + used with Shared Access Signatures. + + :return: Dictionary of SignedIdentifiers + :rtype: Dict[str, Optional[:class:`~azure.data.tables.TableAccessPolicy`]] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + """ + timeout = kwargs.pop("timeout", None) + try: + _, identifiers = await self._client.table.get_access_policy( + table=self.table_name, + timeout=timeout, + cls=kwargs.pop("cls", None) or _return_headers_and_deserialized, + **kwargs + ) + except HttpResponseError as error: + _process_table_error(error) + output = {} # type: Dict[str, Optional[TableAccessPolicy]] + for identifier in cast(List[SignedIdentifier], identifiers): + if identifier.access_policy: + output[identifier.id] = TableAccessPolicy( + start=deserialize_iso(identifier.access_policy.start), + expiry=deserialize_iso(identifier.access_policy.expiry), + permission=identifier.access_policy.permission + ) + else: + output[identifier.id] = None + return output + + @distributed_trace_async + async def set_table_access_policy( + self, + signed_identifiers: Mapping[str, Optional[TableAccessPolicy]], + **kwargs + ) -> None: + """Sets stored access policies for the table that may be used with Shared Access Signatures. + + :param signed_identifiers: Access policies to set for the table + :type signed_identifiers: Dict[str, :class:`~azure.data.tables.TableAccessPolicy`] + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + """ + identifiers = [] + for key, value in signed_identifiers.items(): + payload = None + if value: + payload = TableAccessPolicy( + start=serialize_iso(value.start), + expiry=serialize_iso(value.expiry), + permission=value.permission + ) + identifiers.append(SignedIdentifier(id=key, access_policy=payload)) + try: + await self._client.table.set_access_policy( + table=self.table_name, table_acl=identifiers or None, **kwargs # type: ignore + ) + except HttpResponseError as error: + try: + _process_table_error(error) + except HttpResponseError as table_error: + if (table_error.error_code == 'InvalidXmlDocument' # type: ignore + and len(identifiers) > 5): + raise ValueError( + 'Too many access policies provided. The server does not support setting ' + 'more than 5 access policies on a single resource.' + ) + raise + + @distributed_trace_async + async def create_table(self, **kwargs) -> TableItem: + """Creates a new table under the given account. 
+ + :return: A TableItem representing the created table. + :rtype: :class:`~azure.data.tables.TableItem` + :raises: :class:`~azure.core.exceptions.ResourceExistsError` If the entity already exists + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_create_delete_table_async.py + :start-after: [START create_table] + :end-before: [END create_table] + :language: python + :dedent: 8 + :caption: Creating a table from the TableClient object. + """ + table_properties = TableProperties(table_name=self.table_name) + try: + result = await self._client.table.create(table_properties, **kwargs) + except HttpResponseError as error: + _process_table_error(error) + return TableItem(name=result.table_name) # type: ignore + + @distributed_trace_async + async def delete_table(self, **kwargs) -> None: + """Deletes the table under the current account. No error will be raised if + the given table name is not found. + + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_create_delete_table_async.py + :start-after: [START delete_from_table_client] + :end-before: [END delete_from_table_client] + :language: python + :dedent: 8 + :caption: Deleting a table from the TableClient object. + """ + try: + await self._client.table.delete(table=self.table_name, **kwargs) + except HttpResponseError as error: + if error.status_code == 404: + return + _process_table_error(error) + + @overload + async def delete_entity(self, partition_key: str, row_key: str, **kwargs: Any) -> None: + ... + + @overload + async def delete_entity(self, entity: Union[TableEntity, Mapping[str, Any]], **kwargs: Any) -> None: + ... + + @distributed_trace_async + async def delete_entity(self, *args: Union[TableEntity, str], **kwargs: Any) -> None: + """Deletes the specified entity in a table. No error will be raised if + the entity or PartitionKey-RowKey pairing is not found. + + :param str partition_key: The partition key of the entity. + :param str row_key: The row key of the entity. + :param entity: The entity to delete + :type entity: Union[TableEntity, Mapping[str, str]] + :keyword str etag: Etag of the entity + :keyword match_condition: The condition under which to perform the operation. + Supported values include: MatchConditions.IfNotModified, MatchConditions.Unconditionally. + The default value is Unconditionally. + :paramtype match_condition: ~azure.core.MatchConditions + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/async_samples/sample_insert_delete_entities_async.py + :start-after: [START delete_entity] + :end-before: [END delete_entity] + :language: python + :dedent: 8 + :caption: Adding an entity to a Table + """ + try: + entity = kwargs.pop('entity', None) + if not entity: + entity = args[0] + partition_key = entity['PartitionKey'] + row_key = entity['RowKey'] + except (TypeError, IndexError): + partition_key = kwargs.pop('partition_key', None) + if not partition_key: + partition_key = args[0] + row_key = kwargs.pop("row_key", None) + if not row_key: + row_key = args[1] + + match_condition = kwargs.pop("match_condition", None) + etag = kwargs.pop("etag", None) + if match_condition and entity and not etag: + try: + etag = entity.metadata.get("etag", None) # type: ignore + except (AttributeError, TypeError): + pass + if_match = _get_match_headers( + etag=etag, + match_condition=match_condition or MatchConditions.Unconditionally, + ) + + try: + await self._client.table.delete_entity( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + if_match=if_match, + **kwargs + ) + except HttpResponseError as error: + if error.status_code == 404: + return + _process_table_error(error) + + @distributed_trace_async + async def create_entity( + self, + entity: EntityType, + **kwargs + ) -> Mapping[str, Any]: + """Insert entity in a table. + + :param entity: The properties for the table entity. + :type entity: Union[TableEntity, Mapping[str, Any]] + :return: Dictionary mapping operation metadata returned from the service + :rtype: Dict[str,str] + :raises: :class:`~azure.core.exceptions.ResourceExistsError` If the entity already exists + + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_insert_delete_entities_async.py + :start-after: [START create_entity] + :end-before: [END create_entity] + :language: python + :dedent: 8 + :caption: Adding an entity to a Table + """ + entity = _add_entity_properties(entity) + try: + metadata, content = await self._client.table.insert_entity( # type: ignore + table=self.table_name, + table_entity_properties=entity, # type: ignore + cls=kwargs.pop("cls", _return_headers_and_deserialized), + **kwargs + ) + except HttpResponseError as error: + decoded = _decode_error(error.response, error.message) + if decoded.error_code == "PropertiesNeedValue": + if entity.get("PartitionKey") is None: + raise ValueError("PartitionKey must be present in an entity") + if entity.get("RowKey") is None: + raise ValueError("RowKey must be present in an entity") + _reraise_error(error) + return _trim_service_metadata(metadata, content=content) # type: ignore + + + @distributed_trace_async + async def update_entity( + self, + entity: EntityType, + mode: Union[str, UpdateMode] = UpdateMode.MERGE, + **kwargs + ) -> Mapping[str, Any]: + """Update entity in a table. + + :param entity: The properties for the table entity. + :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str] + :param mode: Merge or Replace entity + :type mode: :class:`~azure.data.tables.UpdateMode` + :keyword str etag: Etag of the entity + :keyword match_condition: The condition under which to perform the operation. + Supported values include: MatchConditions.IfNotModified, MatchConditions.Unconditionally. + The default value is Unconditionally. 
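
To illustrate the etag/match_condition flow described here, a short sketch; it assumes `client` is an open async TableClient from the earlier snippet, and the keys are placeholders.

```python
from azure.core import MatchConditions

entity = {"PartitionKey": "pk", "RowKey": "rk", "value": 1}
await client.create_entity(entity)

# Read the entity back, then delete it only if nobody modified it in between.
current = await client.get_entity("pk", "rk")
await client.delete_entity(
    current,
    etag=current.metadata["etag"],
    match_condition=MatchConditions.IfNotModified,
)
```
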
+        :paramtype match_condition: ~azure.core.MatchConditions
+        :return: Dictionary of operation metadata returned from service
+        :rtype: Dict[str,str]
+        :raises: :class:`~azure.core.exceptions.HttpResponseError`
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/async_samples/sample_update_upsert_merge_entities_async.py
+                :start-after: [START update_entity]
+                :end-before: [END update_entity]
+                :language: python
+                :dedent: 16
+                :caption: Updating an already existing entity in a Table
+        """
+        match_condition = kwargs.pop("match_condition", None)
+        etag = kwargs.pop("etag", None)
+        if match_condition and entity and not etag:
+            try:
+                etag = entity.metadata.get("etag", None)  # type: ignore
+            except (AttributeError, TypeError):
+                pass
+        if_match = _get_match_headers(
+            etag=etag,
+            match_condition=match_condition or MatchConditions.Unconditionally,
+        )
+
+        partition_key = entity["PartitionKey"]
+        row_key = entity["RowKey"]
+        entity = _add_entity_properties(entity)
+        try:
+            metadata = None
+            content = None
+            if mode is UpdateMode.REPLACE:
+                metadata, content = await self._client.table.update_entity(  # type: ignore
+                    table=self.table_name,
+                    partition_key=partition_key,
+                    row_key=row_key,
+                    table_entity_properties=entity,  # type: ignore
+                    if_match=if_match,
+                    cls=kwargs.pop("cls", _return_headers_and_deserialized),
+                    **kwargs
+                )
+            elif mode is UpdateMode.MERGE:
+                metadata, content = await self._client.table.merge_entity(  # type: ignore
+                    table=self.table_name,
+                    partition_key=partition_key,
+                    row_key=row_key,
+                    if_match=if_match,
+                    cls=kwargs.pop("cls", _return_headers_and_deserialized),
+                    table_entity_properties=entity,  # type: ignore
+                    **kwargs
+                )
+            else:
+                raise ValueError("Mode type is not supported")
+        except HttpResponseError as error:
+            _process_table_error(error)
+        return _trim_service_metadata(metadata, content=content)  # type: ignore
+
+    @distributed_trace
+    def list_entities(self, **kwargs) -> AsyncItemPaged[TableEntity]:
+        """Lists entities in a table.
+
+        :keyword int results_per_page: Number of entities returned per service request.
+        :keyword select: Specify desired properties of an entity to return.
+        :paramtype select: str or List[str]
+        :return: AsyncItemPaged[:class:`~azure.data.tables.TableEntity`]
+        :rtype: ~azure.core.async_paging.AsyncItemPaged[TableEntity]
+        :raises: :class:`~azure.core.exceptions.HttpResponseError`
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/async_samples/sample_update_upsert_merge_entities_async.py
+                :start-after: [START list_entities]
+                :end-before: [END list_entities]
+                :language: python
+                :dedent: 16
+                :caption: Listing all entities held within a table
+        """
+        user_select = kwargs.pop("select", None)
+        if user_select and not isinstance(user_select, str):
+            user_select = ",".join(user_select)
+        top = kwargs.pop("results_per_page", None)
+
+        command = functools.partial(self._client.table.query_entities, **kwargs)
+        return AsyncItemPaged(
+            command,
+            table=self.table_name,
+            results_per_page=top,
+            select=user_select,
+            page_iterator_class=TableEntityPropertiesPaged,
+        )
+
+    @distributed_trace
+    def query_entities(
+        self,
+        query_filter: str,
+        **kwargs
+    ) -> AsyncItemPaged[TableEntity]:
+        """Queries entities in a table using an OData filter.
+
+        :param str query_filter: Specify a filter to return certain entities
+        :keyword int results_per_page: Number of entities returned per service request.
+        :keyword select: Specify desired properties of an entity to return.
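
A small usage sketch of the filter and parameter substitution described here, under the same assumptions as the previous snippets (an open async `client`; placeholder names and values).

```python
pages = client.query_entities(
    "value gt @min and PartitionKey eq @pk",   # @-parameters are substituted below
    parameters={"min": 10, "pk": "pk"},
    select=["RowKey", "value"],
    results_per_page=100,
)
async for entity in pages:
    print(entity["RowKey"], entity["value"])
```
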
+ :paramtype select: str or List[str] + :keyword parameters: Dictionary for formatting query with additional, user defined parameters + :paramtype parameters: Dict[str, Any] + :return: AsyncItemPaged[:class:`~azure.data.tables.TableEntity`] + :rtype: ~azure.core.async_paging.AsyncItemPaged[TableEntity] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_query_table_async.py + :start-after: [START query_entities] + :end-before: [END query_entities] + :language: python + :dedent: 8 + :caption: Querying entities from a TableClient + """ + parameters = kwargs.pop("parameters", None) + query_filter = _parameter_filter_substitution( + parameters, query_filter + ) + top = kwargs.pop("results_per_page", None) + user_select = kwargs.pop("select", None) + if user_select and not isinstance(user_select, str): + user_select = ",".join(user_select) + + command = functools.partial(self._client.table.query_entities, **kwargs) + return AsyncItemPaged( + command, + table=self.table_name, + results_per_page=top, + filter=query_filter, + select=user_select, + page_iterator_class=TableEntityPropertiesPaged, + ) + + @distributed_trace_async + async def get_entity( + self, + partition_key: str, + row_key: str, + **kwargs + ) -> TableEntity: + """Get a single entity in a table. + + :param partition_key: The partition key of the entity. + :type partition_key: str + :param row_key: The row key of the entity. + :type row_key: str + :keyword select: Specify desired properties of an entity to return. + :paramtype select: str or List[str] + :return: Dictionary mapping operation metadata returned from the service + :rtype: :class:`~azure.data.tables.TableEntity` + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_update_upsert_merge_entities_async.py + :start-after: [START get_entity] + :end-before: [END get_entity] + :language: python + :dedent: 16 + :caption: Getting an entity from PartitionKey and RowKey + """ + user_select = kwargs.pop("select", None) + if user_select and not isinstance(user_select, str): + user_select = ",".join(user_select) + try: + entity = await self._client.table.query_entity_with_partition_and_row_key( + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + query_options=QueryOptions(select=user_select), + **kwargs + ) + properties = _convert_to_entity(entity) + except HttpResponseError as error: + _process_table_error(error) + return properties + + @distributed_trace_async + async def upsert_entity( + self, + entity: EntityType, + mode: Union[str, UpdateMode] = UpdateMode.MERGE, + **kwargs + ) -> Mapping[str, Any]: + """Update/Merge or Insert entity into table. + + :param entity: The properties for the table entity. + :type entity: :class:`~azure.data.tables.TableEntity` or Dict[str,str] + :param mode: Merge or Replace entity + :type mode: :class:`~azure.data.tables.UpdateMode` + :return: Dictionary mapping operation metadata returned from the service + :rtype: Dict[str,str] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/async_samples/sample_update_upsert_merge_entities_async.py + :start-after: [START upsert_entity] + :end-before: [END upsert_entity] + :language: python + :dedent: 16 + :caption: Update/Merge or Insert an entity into a table + """ + + partition_key = entity["PartitionKey"] + row_key = entity["RowKey"] + entity = _add_entity_properties(entity) + + try: + metadata = None + content = None + if mode is UpdateMode.MERGE: + metadata, content = await self._client.table.merge_entity( # type: ignore + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + table_entity_properties=entity, # type: ignore + cls=kwargs.pop("cls", _return_headers_and_deserialized), + **kwargs + ) + elif mode is UpdateMode.REPLACE: + metadata, content = await self._client.table.update_entity( # type: ignore + table=self.table_name, + partition_key=partition_key, + row_key=row_key, + table_entity_properties=entity, # type: ignore + cls=kwargs.pop("cls", _return_headers_and_deserialized), + **kwargs + ) + else: + raise ValueError( + """Update mode {} is not supported. + For a list of supported modes see the UpdateMode enum""".format( + mode + ) + ) + except HttpResponseError as error: + _process_table_error(error) + return _trim_service_metadata(metadata, content=content) # type: ignore + + @distributed_trace_async + async def submit_transaction( + self, + operations: Iterable[TransactionOperationType], + **kwargs + ) -> List[Mapping[str, Any]]: + """Commit a list of operations as a single transaction. + + If any one of these operations fails, the entire transaction will be rejected. + + :param operations: The list of operations to commit in a transaction. This should be a list of + tuples containing an operation name, the entity on which to operate, and optionally, a dict of additional + kwargs for that operation. + :type operations: Iterable[Tuple[str, EntityType]] + :return: A list of mappings with response metadata for each operation in the transaction. + :rtype: List[Mapping[str, Any]] + :raises ~azure.data.tables.TableTransactionError: + + .. admonition:: Example: + + .. 
literalinclude:: ../samples/async_samples/sample_batching_async.py + :start-after: [START batching] + :end-before: [END batching] + :language: python + :dedent: 8 + :caption: Using transactions to send multiple requests at once + """ + batched_requests = TableBatchOperations( + self._client, + self._client._serialize, # pylint: disable=protected-access + self._client._deserialize, # pylint: disable=protected-access + self._client._config, # pylint: disable=protected-access + self.table_name, + is_cosmos_endpoint=self._cosmos_endpoint, + **kwargs + ) + for operation in operations: + try: + operation_kwargs = operation[2] # type: ignore + except IndexError: + operation_kwargs = {} + try: + getattr(batched_requests, operation[0].lower())(operation[1], **operation_kwargs) + except AttributeError: + raise ValueError("Unrecognized operation: {}".format(operation)) + return await self._batch_send(*batched_requests.requests, **kwargs) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_service_client_async.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_service_client_async.py new file mode 100644 index 000000000000..ba5282a1f7ec --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/aio/_table_service_client_async.py @@ -0,0 +1,335 @@ +# ------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# -------------------------------------------------------------------------- +import functools +from typing import ( + Optional, + Dict, + List, + TYPE_CHECKING +) + +from azure.core.async_paging import AsyncItemPaged +from azure.core.exceptions import HttpResponseError, ResourceExistsError +from azure.core.pipeline import AsyncPipeline +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async + +from .._base_client import parse_connection_str +from .._generated.models import TableServiceProperties +from .._models import service_stats_deserialize, service_properties_deserialize +from .._error import _process_table_error +from .._models import TableItem, LocationMode +from .._serialize import _parameter_filter_substitution +from ._table_client_async import TableClient +from ._base_client_async import AsyncTablesBaseClient, AsyncTransportWrapper +from ._models import TablePropertiesPaged + +if TYPE_CHECKING: + from .._models import TableCorsRule, TableMetrics, TableAnalyticsLogging + + +class TableServiceClient(AsyncTablesBaseClient): + """A client to interact with the Table Service at the account level. + + This client provides operations to retrieve and configure the account properties + as well as list, create and delete tables within the account. + For operations relating to a specific table, a client for this entity + can be retrieved using the :func:`~get_table_client` function. + + :ivar str account_name: The name of the Tables account. + :ivar str url: The full URL to the Tables account. + :param str endpoint: + The URL to the table service endpoint. Any other entities included + in the URL path (e.g. table) will be discarded. This URL can be optionally + authenticated with a SAS token. 
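
As a hedged illustration of constructing this client without a connection string, using the public azure.data.tables.aio package that this vendored module mirrors; the account name and key are placeholders.

```python
from azure.core.credentials import AzureNamedKeyCredential
from azure.data.tables.aio import TableServiceClient  # public twin of this module

service = TableServiceClient(
    endpoint="https://<account>.table.core.windows.net",
    credential=AzureNamedKeyCredential("<account>", "<account-key>"),
)
```
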
+ :keyword credential: + The credentials with which to authenticate. This is optional if the + account URL already has a SAS token. The value can be one of AzureNamedKeyCredential (azure-core), + AzureSasCredential (azure-core), or TokenCredentials from azure-identity. + :paramtype credential: + :class:`~azure.core.credentials.AzureNamedKeyCredential` or + :class:`~azure.core.credentials.AzureSasCredential` or + :class:`~azure.core.credentials.TokenCredential` + :keyword str api_version: + The Storage API version to use for requests. Default value is '2019-02-02'. + Setting to an older version may result in reduced feature compatibility. + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_authentication_async.py + :start-after: [START auth_from_shared_key] + :end-before: [END auth_from_shared_key] + :language: python + :dedent: 8 + :caption: Creating the tableServiceClient with an account url and credential. + + .. literalinclude:: ../samples/async_samples/sample_authentication_async.py + :start-after: [START auth_by_sas] + :end-before: [END auth_by_sas] + :language: python + :dedent: 8 + :caption: Creating the tableServiceClient with Shared Access Signature. + """ + + def _format_url(self, hostname: str) -> str: + """Format the endpoint URL according to the current location + mode hostname. + """ + return "{}://{}{}".format(self.scheme, hostname, self._query_str) + + @classmethod + def from_connection_string(cls, conn_str: str, **kwargs) -> 'TableServiceClient': + """Create TableServiceClient from a Connection String. + + :param str conn_str: A connection string to an Azure Tables account. + :returns: A Table service client. + :rtype: :class:`~azure.data.tables.aio.TableServiceClient` + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_authentication_async.py + :start-after: [START auth_from_connection_string] + :end-before: [END auth_from_connection_string] + :language: python + :dedent: 8 + :caption: Creating the tableServiceClient from a connection string + + """ + endpoint, credential = parse_connection_str( + conn_str=conn_str, credential=None, keyword_args=kwargs + ) + return cls(endpoint, credential=credential, **kwargs) + + @distributed_trace_async + async def get_service_stats(self, **kwargs) -> Dict[str, object]: + """Retrieves statistics related to replication for the Table service. It is only available on the secondary + location endpoint when read-access geo-redundant replication is enabled for the account. + + :return: Dictionary of service stats + :rtype: Dict[str, object] + :raises: :class:`~azure.core.exceptions.HttpResponseError` + """ + try: + timeout = kwargs.pop("timeout", None) + stats = await self._client.service.get_statistics( # type: ignore + timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs + ) + except HttpResponseError as error: + _process_table_error(error) + return service_stats_deserialize(stats) + + @distributed_trace_async + async def get_service_properties(self, **kwargs) -> Dict[str, object]: + """Gets the properties of an account's Table service, + including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules. 
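
A brief sketch of both read-only calls, assuming the `service` object from the previous snippet; note that get_service_stats requires read-access geo-redundant replication, and the dictionary shapes follow the `*_deserialize` helpers imported above.

```python
async with service:
    stats = await service.get_service_stats()       # secondary endpoint, RA-GRS only
    props = await service.get_service_properties()
    # Expected shape per the deserialize helpers; treat keys as an assumption.
    print(stats["geo_replication"]["status"], list(props))
```
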
+
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: Dictionary of service properties
+        :rtype: Dict[str, object]
+        :raises: :class:`~azure.core.exceptions.HttpResponseError`
+        """
+        timeout = kwargs.pop("timeout", None)
+        try:
+            service_props = await self._client.service.get_properties(timeout=timeout, **kwargs)  # type: ignore
+        except HttpResponseError as error:
+            _process_table_error(error)
+        return service_properties_deserialize(service_props)
+
+    @distributed_trace_async
+    async def set_service_properties(
+        self,
+        *,
+        analytics_logging: Optional['TableAnalyticsLogging'] = None,
+        hour_metrics: Optional['TableMetrics'] = None,
+        minute_metrics: Optional['TableMetrics'] = None,
+        cors: Optional[List['TableCorsRule']] = None,
+        **kwargs
+    ) -> None:
+        """Sets properties for an account's Table service endpoint,
+        including properties for Analytics and CORS (Cross-Origin Resource Sharing) rules.
+
+        :keyword analytics_logging: Properties for analytics
+        :paramtype analytics_logging: ~azure.data.tables.TableAnalyticsLogging
+        :keyword hour_metrics: Hour level metrics
+        :paramtype hour_metrics: ~azure.data.tables.TableMetrics
+        :keyword minute_metrics: Minute level metrics
+        :paramtype minute_metrics: ~azure.data.tables.TableMetrics
+        :keyword cors: Cross-origin resource sharing rules
+        :paramtype cors: List[~azure.data.tables.TableCorsRule]
+        :return: None
+        :rtype: None
+        :raises: :class:`~azure.core.exceptions.HttpResponseError`
+        """
+        if cors:
+            cors = [c._to_generated() for c in cors]  # pylint:disable=protected-access
+        props = TableServiceProperties(
+            logging=analytics_logging,
+            hour_metrics=hour_metrics,
+            minute_metrics=minute_metrics,
+            cors=cors,  # type: ignore
+        )
+        try:
+            await self._client.service.set_properties(props, **kwargs)  # type: ignore
+        except HttpResponseError as error:
+            _process_table_error(error)
+
+    @distributed_trace_async
+    async def create_table(self, table_name: str, **kwargs) -> TableClient:
+        """Creates a new table under the given account.
+
+        :param str table_name: The Table name.
+        :return: A client to interact with the newly created table.
+        :rtype: :class:`~azure.data.tables.aio.TableClient`
+        :raises: :class:`~azure.core.exceptions.ResourceExistsError`
+
+        .. admonition:: Example:
+
+            .. literalinclude:: ../samples/async_samples/sample_create_delete_table_async.py
+                :start-after: [START create_table]
+                :end-before: [END create_table]
+                :language: python
+                :dedent: 8
+                :caption: Creating a table from TableServiceClient.
+        """
+        table = self.get_table_client(table_name=table_name)
+        await table.create_table(**kwargs)
+        return table
+
+    @distributed_trace_async
+    async def create_table_if_not_exists(self, table_name: str, **kwargs) -> TableClient:
+        """Creates a new table if it does not currently exist.
+        If the table already exists, a client for the existing table is returned.
+
+        :param table_name: The Table name.
+        :type table_name: str
+        :return: TableClient
+        :rtype: :class:`~azure.data.tables.aio.TableClient`
+
+        .. admonition:: Example:
+
+            .. 
literalinclude:: ../samples/async_samples/sample_create_delete_table_async.py + :start-after: [START create_if_not_exists] + :end-before: [END create_if_not_exists] + :language: python + :dedent: 8 + :caption: Creating a table if it does not already exist + """ + table = self.get_table_client(table_name=table_name) + try: + await table.create_table(**kwargs) + except ResourceExistsError: + pass + return table + + @distributed_trace_async + async def delete_table(self, table_name: str, **kwargs) -> None: + """Deletes a table under the current account. No error will be raised if + the table is not found. + + :param str table_name: The Table name. + :return: None + :rtype: None + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_create_delete_table_async.py + :start-after: [START delete_table] + :end-before: [END delete_table] + :language: python + :dedent: 8 + :caption: Deleting a table + """ + table = self.get_table_client(table_name=table_name) + await table.delete_table(**kwargs) + + @distributed_trace + def list_tables(self, **kwargs) -> AsyncItemPaged[TableItem]: + """Queries tables under the given account. + + :keyword int results_per_page: Number of tables per page in returned ItemPaged + :return: AsyncItemPaged[:class:`~azure.data.tables.TableItem`] + :rtype: ~azure.core.async_paging.AsyncItemPaged + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_query_tables_async.py + :start-after: [START tsc_list_tables] + :end-before: [END tsc_list_tables] + :language: python + :dedent: 16 + :caption: Listing all tables in an account + """ + top = kwargs.pop("results_per_page", None) + + command = functools.partial(self._client.table.query, **kwargs) + return AsyncItemPaged( + command, + results_per_page=top, + page_iterator_class=TablePropertiesPaged, + ) + + @distributed_trace + def query_tables(self, query_filter: str, **kwargs) -> AsyncItemPaged[TableItem]: + """Queries tables under the given account. + + :param str query_filter: Specify a filter to return certain tables. + :keyword int results_per_page: Number of tables per page in return ItemPaged + :keyword parameters: Dictionary for formatting query with additional, user defined parameters + :paramtype parameters: Dict[str, Any] + :return: AsyncItemPaged[:class:`~azure.data.tables.TableItem`] + :rtype: ~azure.core.async_paging.AsyncItemPaged + :raises: :class:`~azure.core.exceptions.HttpResponseError` + + .. admonition:: Example: + + .. literalinclude:: ../samples/async_samples/sample_query_tables_async.py + :start-after: [START tsc_query_tables] + :end-before: [END tsc_query_tables] + :language: python + :dedent: 16 + :caption: Querying tables in an account given specific parameters + """ + parameters = kwargs.pop("parameters", None) + query_filter = _parameter_filter_substitution( + parameters, query_filter + ) + top = kwargs.pop("results_per_page", None) + command = functools.partial(self._client.table.query, **kwargs) + return AsyncItemPaged( + command, + results_per_page=top, + filter=query_filter, + page_iterator_class=TablePropertiesPaged, + ) + + def get_table_client(self, table_name: str, **kwargs) -> TableClient: + """Get a client to interact with the specified table. + + The table need not already exist. + + :param str table_name: The table name + :returns: A :class:`~azure.data.tables.aio.TableClient` object. 
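
Tying the table-level helpers together, a short sketch under the same assumptions as the snippets above (an open `service` object; the table name is a placeholder).

```python
async for item in service.list_tables(results_per_page=10):
    print(item.name)

# The table need not exist yet; get_table_client never calls the service.
table = service.get_table_client("checkpoints")
await table.create_table()
```
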
+ :rtype: :class:`~azure.data.tables.aio.TableClient` + + """ + pipeline = AsyncPipeline( # type: ignore + transport=AsyncTransportWrapper(self._client._client._pipeline._transport), # pylint:disable=protected-access + policies=self._policies, + ) + return TableClient( + self.url, + table_name=table_name, + credential=self.credential, # type: ignore + api_version=self.api_version, + pipeline=pipeline, + location_mode=self._location_mode, + _hosts=self._hosts, + **kwargs + ) diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/py.typed b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/azure/eventhub/extensions/checkpointstoretable/_vendor/data/tables/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/dev_requirements.txt b/sdk/eventhub/azure-eventhub-checkpointstoretable/dev_requirements.txt index 30f891539e8f..cfd543b488e3 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoretable/dev_requirements.txt +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/dev_requirements.txt @@ -1,3 +1,3 @@ -e ../../../tools/azure-sdk-tools -../../core/azure-core +aiohttp>=3.0; python_version >= '3.5' -e ../../../tools/azure-devtools \ No newline at end of file diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/samples/receive_events_using_checkpoint_store.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/samples/receive_events_using_checkpoint_store.py new file mode 100644 index 000000000000..822fdf1e0ef5 --- /dev/null +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/samples/receive_events_using_checkpoint_store.py @@ -0,0 +1,33 @@ +import os +from azure.eventhub import EventHubConsumerClient +from azure.eventhub.extensions.checkpointstoretable import TableCheckpointStore + +CONNECTION_STR = os.environ["EVENT_HUB_CONN_STR"] +EVENTHUB_NAME = os.environ["EVENT_HUB_NAME"] +STORAGE_CONNECTION_STR = os.environ["AZURE_STORAGE_CONN_STR"] +TABLE_NAME = "your-table-name" # Please make sure the table resource exists. + + +def on_event(partition_context, event): + # Put your code here. + # Avoid time-consuming operations. + print(event) + partition_context.update_checkpoint(event) + + +if __name__ == "__main__": + checkpoint_store = TableCheckpointStore.from_connection_string( + STORAGE_CONNECTION_STR, + table_name=TABLE_NAME, + ) + client = EventHubConsumerClient.from_connection_string( + CONNECTION_STR, + consumer_group="$Default", + eventhub_name=EVENTHUB_NAME, + checkpoint_store=checkpoint_store, + ) + + try: + client.receive(on_event) + except KeyboardInterrupt: + client.close() diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/setup.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/setup.py index adff84f000bc..4a981983cfa7 100644 --- a/sdk/eventhub/azure-eventhub-checkpointstoretable/setup.py +++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/setup.py @@ -1,24 +1,23 @@ +#!/usr/bin/env python + +#------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. 
+#--------------------------------------------------------------------------
+
 from setuptools import setup, find_packages
 import os
 from io import open
 import re
 
-# example setup.py Feel free to copy the entire "azure-template" folder into a package folder named
-# with "azure-". Ensure that the below arguments to setup() are updated to reflect
-# your package.
-
-# this setup.py is set up in a specific way to keep the azure* and azure-mgmt-* namespaces WORKING all the way
-# up from python 2.7. Reference here: https://github.com/Azure/azure-sdk-for-python/wiki/Azure-packaging
-
 PACKAGE_NAME = "azure-eventhub-checkpointstoretable"
 PACKAGE_PPRINT_NAME = "Event Hubs checkpointer implementation with Azure Table Storage"
 
 # a-b-c => a/b/c
-package_folder_path = PACKAGE_NAME.replace("-", "/")
-# a-b-c => a.b.c
-namespace_name = PACKAGE_NAME.replace("-", ".")
-
 package_folder_path = "azure/eventhub/extensions/checkpointstoretable"
+# a-b-c => a.b.c
+namespace_name = "azure.eventhub.extensions.checkpointstoretable"
 
 # Version extraction inspired from 'requests'
 with open(os.path.join(package_folder_path, "_version.py"), "r") as fd:
@@ -59,18 +58,27 @@
         # Exclude packages that will be covered by PEP420 or nspkg
         # This means any folder structure that only consists of a __init__.py.
         # For example, for storage, this would mean adding 'azure.storage'
-        # in addition to the default 'azure' that is seen here.
-        "azure",
+            "samples",
+            "azure",
+            "azure.eventhub",
+            "azure.eventhub.extensions",
         ]
     ),
     install_requires=[
-        "azure-core<2.0.0,>=1.2.2",
+        "azure-core<2.0.0,>=1.14.0",
+        "azure-eventhub<6.0.0,>=5.0.0",
+        "msrest>=0.5.0",
     ],
     extras_require={
-        ":python_version<'3.0'": ["azure-nspkg"],
+        ":python_version<'3.0'": ["azure-nspkg", "futures", "azure-data-nspkg<2.0.0,>=1.0.0"],
+        ":python_version<'3.4'": ["enum34>=1.0.4"],
+        ":python_version<'3.5'": ["typing"],
     },
     project_urls={
         "Bug Reports": "https://github.com/Azure/azure-sdk-for-python/issues",
-        "Source": "https://github.com/Azure/azure-sdk-python",
+        "Source": "https://github.com/Azure/azure-sdk-for-python",
     },
 )
diff --git a/sdk/eventhub/azure-eventhub-checkpointstoretable/tests/test_storage_table_partition_manager.py b/sdk/eventhub/azure-eventhub-checkpointstoretable/tests/test_storage_table_partition_manager.py
index 612a196e10ac..f6c0f8bf0e09 100644
--- a/sdk/eventhub/azure-eventhub-checkpointstoretable/tests/test_storage_table_partition_manager.py
+++ b/sdk/eventhub/azure-eventhub-checkpointstoretable/tests/test_storage_table_partition_manager.py
@@ -1,8 +1,212 @@
+# -------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+# -------------------------------------------------------------------------- + import pytest +import uuid +import warnings +import os + +from azure.eventhub.extensions.checkpointstoretable._vendor.data.tables import TableServiceClient from azure.eventhub.extensions.checkpointstoretable import TableCheckpointStore +from azure.eventhub.exceptions import OwnershipLostError + +STORAGE_CONN_STR = [ + #os.environ.get("AZURE_STORAGE_CONN_STR", "Azure Storage Connection String"), + os.environ.get("AZURE_COSMOS_CONN_STR", "Azure Storage Connection String"), +] + + +def get_live_storage_table_client(storage_connection_str): + try: + table_name = "table{}".format(uuid.uuid4().hex) + table_service_client = TableServiceClient.from_connection_string( + storage_connection_str + ) + table_service_client.create_table_if_not_exists(table_name) + return storage_connection_str, table_name + except: + pytest.skip("Storage table client can't be created") + + +def remove_live_storage_table_client(storage_connection_str, table_name): + try: + table_service_client = TableServiceClient.from_connection_string( + storage_connection_str + ) + table_service_client.delete_table(table_name) + except: + warnings.warn(UserWarning("storage table teardown failed")) + + +def _create_checkpoint(partition_id, offset, sequence_number): + return { + "fully_qualified_namespace": "test_namespace", + "eventhub_name": "eventhub", + "consumer_group": "$default", + "partition_id": str(partition_id), + "offset": offset, + "sequence_number": sequence_number, + } + + +def _create_ownership(partition_id, owner_id, etag, last_modified_time): + return { + "fully_qualified_namespace": "test_namespace", + "eventhub_name": "eventhub", + "consumer_group": "$default", + "partition_id": str(partition_id), + "owner_id": owner_id, + "etag": etag, + "last_modified_time": last_modified_time, + } + + +def _claim_ownership_exception_test(storage_connection_str, table_name): + fully_qualified_namespace = "test_namespace" + eventhub_name = "eventhub" + consumer_group = "$default" + ownership_cnt = 8 + + checkpoint_store = TableCheckpointStore.from_connection_string( + storage_connection_str, table_name + ) + ownership_list = [] + for i in range(ownership_cnt): + ownership = _create_ownership(str(i), "owner_id", None, None) + ownership_list.append(ownership) + result_ownership_list = checkpoint_store.claim_ownership(ownership_list) + assert result_ownership_list[0]["owner_id"] == "owner_id" + single_ownership = [result_ownership_list[0].copy()] + single_ownership[0]["owner_id"] = "Bill" + ownership_list = checkpoint_store.claim_ownership(single_ownership) + assert ownership_list[0]["owner_id"] == "Bill" + + single_ownership = [result_ownership_list[0].copy()] + single_ownership[0]["etag"] = "W/\"datetime'2021-08-02T00%3A46%3A51.7645424Z'\"" + single_ownership[0]["owner_id"] = "Jack" + single_ownership[0]["partition_id"] = "10" + result_ownership = checkpoint_store.claim_ownership(single_ownership) + list_ownership = checkpoint_store.list_ownership( + fully_qualified_namespace, eventhub_name, consumer_group + ) + assert result_ownership[0] in list_ownership + + single_ownership = [result_ownership_list[0].copy()] + single_ownership[0]["etag"] = "W/\"datetime'2021-08-02T00%3A46%3A51.7645424Z'\"" + with pytest.raises(OwnershipLostError) as e_info: + checkpoint_store.claim_ownership(single_ownership) + + +def _claim_and_list_ownership(storage_connection_str, table_name): + fully_qualified_namespace = "test_namespace" + eventhub_name = "eventhub" + 
+    consumer_group = "$default"
+    ownership_cnt = 8
+
+    checkpoint_store = TableCheckpointStore.from_connection_string(
+        storage_connection_str, table_name
+    )
+    ownership_list = checkpoint_store.list_ownership(
+        fully_qualified_namespace, eventhub_name, consumer_group
+    )
+    assert len(ownership_list) == 0
+
+    ownership_list = []
+
+    for i in range(ownership_cnt):
+        ownership = _create_ownership(str(i), "owner_id", None, None)
+        ownership_list.append(ownership)
+    result_ownership_list = checkpoint_store.claim_ownership(ownership_list)
+    # Claiming stamps each ownership with a fresh etag and last_modified_time.
+    assert ownership_list != result_ownership_list
+    assert len(result_ownership_list) == len(ownership_list)
+    for i in range(len(ownership_list)):
+        assert ownership_list[i]["etag"] != result_ownership_list[i]["etag"]
+        assert (
+            ownership_list[i]["last_modified_time"]
+            != result_ownership_list[i]["last_modified_time"]
+        )
+
+    # list_ownership should return exactly what claim_ownership persisted.
+    ownership_list = checkpoint_store.list_ownership(
+        fully_qualified_namespace, eventhub_name, consumer_group
+    )
+    assert len(ownership_list) == ownership_cnt
+    assert len(ownership_list) == len(result_ownership_list)
+    for i in range(len(result_ownership_list)):
+        assert ownership_list[i]["etag"] == result_ownership_list[i]["etag"]
+        assert (
+            ownership_list[i]["last_modified_time"]
+            == result_ownership_list[i]["last_modified_time"]
+        )
+
+
+def _update_and_list_checkpoint(storage_connection_str, table_name):
+    fully_qualified_namespace = "test_namespace"
+    eventhub_name = "eventhub"
+    consumer_group = "$default"
+    partition_cnt = 8
+
+    checkpoint_store = TableCheckpointStore.from_connection_string(
+        storage_connection_str, table_name
+    )
+    checkpoint_list = checkpoint_store.list_checkpoints(
+        fully_qualified_namespace, eventhub_name, consumer_group
+    )
+    assert len(checkpoint_list) == 0
+    for i in range(partition_cnt):
+        checkpoint = _create_checkpoint(i, 2, 20)
+        checkpoint_store.update_checkpoint(checkpoint)
+
+    checkpoint_list = checkpoint_store.list_checkpoints(
+        fully_qualified_namespace, eventhub_name, consumer_group
+    )
+    assert len(checkpoint_list) == partition_cnt
+    # Offsets round-trip as strings even when supplied as ints; sequence
+    # numbers stay ints.
+    for checkpoint in checkpoint_list:
+        assert checkpoint["offset"] == "2"
+        assert checkpoint["sequence_number"] == 20
+
+    checkpoint = _create_checkpoint(0, "30", 42)
+    checkpoint_store.update_checkpoint(checkpoint)
+    checkpoint_list = checkpoint_store.list_checkpoints(
+        fully_qualified_namespace, eventhub_name, consumer_group
+    )
+    assert len(checkpoint_list) == partition_cnt
+    assert checkpoint_list[0]["offset"] == "30"
+
+
+@pytest.mark.parametrize("storage_connection_str", STORAGE_CONN_STR)
+@pytest.mark.liveTest
+def test_claim_ownership_exception(storage_connection_str):
+    storage_connection_str, table_name = get_live_storage_table_client(
+        storage_connection_str
+    )
+    try:
+        _claim_ownership_exception_test(storage_connection_str, table_name)
+    finally:
+        remove_live_storage_table_client(storage_connection_str, table_name)
+
+
+@pytest.mark.parametrize("storage_connection_str", STORAGE_CONN_STR)
+@pytest.mark.liveTest
+def test_claim_and_list_ownership(storage_connection_str):
+    storage_connection_str, table_name = get_live_storage_table_client(
+        storage_connection_str
+    )
+    try:
+        _claim_and_list_ownership(storage_connection_str, table_name)
+    finally:
+        remove_live_storage_table_client(storage_connection_str, table_name)
 
-def test_constructor():
-    client = TableCheckpointStore()
-    assert client is not None
-
+
+@pytest.mark.parametrize("storage_connection_str", STORAGE_CONN_STR)
+@pytest.mark.liveTest
+def test_update_checkpoint(storage_connection_str):
+    storage_connection_str, table_name = get_live_storage_table_client(
+        storage_connection_str
+    )
+    try:
+        _update_and_list_checkpoint(storage_connection_str, table_name)
+    finally:
+        remove_live_storage_table_client(storage_connection_str, table_name)
diff --git a/sdk/eventhub/azure-mgmt-eventhub/dev_requirements.txt b/sdk/eventhub/azure-mgmt-eventhub/dev_requirements.txt
index cfd543b488e3..501388194f4f 100644
--- a/sdk/eventhub/azure-mgmt-eventhub/dev_requirements.txt
+++ b/sdk/eventhub/azure-mgmt-eventhub/dev_requirements.txt
@@ -1,3 +1,5 @@
 -e ../../../tools/azure-sdk-tools
-aiohttp>=3.0; python_version >= '3.5'
+../../core/azure-core
+../azure-eventhub
+../../tables/azure-data-tables
 -e ../../../tools/azure-devtools
\ No newline at end of file
diff --git a/sdk/eventhub/tests.yml b/sdk/eventhub/tests.yml
index a4912dfe1dec..024fa153ea3d 100644
--- a/sdk/eventhub/tests.yml
+++ b/sdk/eventhub/tests.yml
@@ -19,3 +19,4 @@ stages:
       AZURE_TENANT_ID: $(python-eh-livetest-event-hub-aad-tenant-id)
       AZURE_CLIENT_SECRET: $(python-eh-livetest-event-hub-aad-secret)
       AZURE_SUBSCRIPTION_ID: $(python-eh-livetest-event-hub-subscription-id)
+      AZURE_COSMOS_CONN_STR: $(python-eventhub-livetest-cosmos-conn-str)
diff --git a/shared_requirements.txt b/shared_requirements.txt
index 3a1b2804d60f..e53fa78035b5 100644
--- a/shared_requirements.txt
+++ b/shared_requirements.txt
@@ -176,6 +176,7 @@ opentelemetry-sdk<2.0.0,>=1.0.0
 #override azure-eventhub-checkpointstoreblob azure-core<2.0.0,>=1.10.0
 #override azure-eventhub-checkpointstoreblob-aio azure-core<2.0.0,>=1.10.0
 #override azure-eventhub-checkpointstoreblob-aio aiohttp<4.0,>=3.0
+#override azure-eventhub-checkpointstoretable azure-core<2.0.0,>=1.14.0
 #override azure-eventhub uamqp>=1.4.1,<2.0.0
 #override azure-appconfiguration msrest>=0.6.10
 #override azure-mgmt-appconfiguration msrest>=0.6.21
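
Editor's note, not part of the diff: the changes above cover packaging, live tests, and CI plumbing for the new checkpoint store. For orientation, below is a minimal consumer sketch showing how the store is intended to plug into the stable EventHubConsumerClient API from azure-eventhub 5.x. The environment variable names and the "checkpoints" table are illustrative assumptions, not taken from this PR; the Event Hubs connection string is assumed to include an EntityPath.

    import os

    from azure.eventhub import EventHubConsumerClient
    from azure.eventhub.extensions.checkpointstoretable import TableCheckpointStore

    # Assumed names for illustration only: AZURE_STORAGE_CONN_STR,
    # EVENT_HUB_CONN_STR, and a pre-created table named "checkpoints".
    checkpoint_store = TableCheckpointStore.from_connection_string(
        os.environ["AZURE_STORAGE_CONN_STR"], table_name="checkpoints"
    )

    client = EventHubConsumerClient.from_connection_string(
        os.environ["EVENT_HUB_CONN_STR"],  # assumed to contain an EntityPath
        consumer_group="$Default",
        checkpoint_store=checkpoint_store,  # enables load balancing + checkpointing
    )


    def on_event(partition_context, event):
        # Persist progress; the store writes one checkpoint entity per partition,
        # keyed by namespace, event hub, and consumer group.
        partition_context.update_checkpoint(event)


    with client:
        client.receive(on_event=on_event, starting_position="-1")  # stream start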