diff --git a/sdk/cosmos/azure-cosmos/HISTORY.md b/sdk/cosmos/azure-cosmos/HISTORY.md index 28da1d733e79..a72609a1be0a 100644 --- a/sdk/cosmos/azure-cosmos/HISTORY.md +++ b/sdk/cosmos/azure-cosmos/HISTORY.md @@ -1,4 +1,46 @@ # Change Log azure-cosmos + +## Version 4.0.0b2: + +Version 4.0.0b2 is the second iteration in our efforts to build a more Pythonic client library. + +**Breaking changes** + +- The client connection has been adapted to consume the HTTP pipeline defined in `azure.core.pipeline`. +- Interactive objects have been renamed to proxies. This includes: + - `Database` -> `DatabaseProxy` + - `User` -> `UserProxy` + - `Container` -> `ContainerProxy` + - `Scripts` -> `ScriptsProxy` +- The constructor of `CosmosClient` has been updated: + - The `auth` parameter has been renamed to `credential` and will now take an authentication type directly. This means the master key value, a dictionary of resource tokens, or a list of permissions can be passed in. However, the old dictionary format is still supported. + - The `connection_policy` parameter has been made a keyword-only parameter, and while it is still supported, each of the individual attributes of the policy can now be passed in as explicit keyword arguments: + - `request_timeout` + - `media_request_timeout` + - `connection_mode` + - `media_read_mode` + - `proxy_config` + - `enable_endpoint_discovery` + - `preferred_locations` + - `multiple_write_locations` +- A new classmethod constructor has been added to `CosmosClient` to enable creation via a connection string retrieved from the Azure portal. +- Some `read_all` operations have been renamed to `list` operations: + - `CosmosClient.read_all_databases` -> `CosmosClient.list_databases` + - `Container.read_all_conflicts` -> `ContainerProxy.list_conflicts` + - `Database.read_all_containers` -> `DatabaseProxy.list_containers` + - `Database.read_all_users` -> `DatabaseProxy.list_users` + - `User.read_all_permissions` -> `UserProxy.list_permissions` +- The `request_options` and `feed_options` parameters on all operations have been moved to keyword-only parameters. In addition, while these options dictionaries are still supported, each of the individual options within the dictionary is now supported as an explicit keyword argument. +- The error hierarchy now inherits from `azure.core.AzureError` instead of `CosmosError`, which has been removed. + - `HTTPFailure` has been renamed to `CosmosHttpResponseError` + - `JSONParseFailure` has been removed and replaced by `azure.core.DecodeError` + - Added additional errors for specific response codes: + - `CosmosResourceNotFoundError` for status 404 + - `CosmosResourceExistsError` for status 409 + - `CosmosAccessConditionFailedError` for status 412 +- `CosmosClient` can now be run in a context manager to handle closing the client connection. +- Iterable responses (e.g. query responses and list responses) are now of type `azure.core.paging.ItemPaged`. The method `fetch_next_block` has been replaced by a secondary iterator, accessed by the `by_page` method. ## Version 4.0.0b1: Version 4.0.0b1 is the first preview of our efforts to create a user-friendly and Pythonic client library for Azure Cosmos. For more information about this, and preview releases of other Azure SDK libraries, please visit https://aka.ms/azure-sdk-preview1-python.
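As a quick orientation (not part of this diff), here is a minimal sketch of the 4.0.0b2 surface described in the changelog above, using the `ACCOUNT_URI`/`ACCOUNT_KEY` variables from the README; the connection-string classmethod is not named in this changelog, so it is omitted here.

```Python
# Minimal sketch of the 4.0.0b2 surface described in the changelog above.
import os
from azure.cosmos import CosmosClient

url = os.environ["ACCOUNT_URI"]
key = os.environ["ACCOUNT_KEY"]

# The credential (master key, resource-token dict, or permission list) is now
# passed directly; the old auth={'masterKey': key} dictionary still works.
with CosmosClient(url, credential=key) as client:  # context manager closes the connection
    for database in client.list_databases():       # formerly read_all_databases
        print(database["id"])
```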
diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index b3ca6d2ab67d..84bf724222dd 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -63,23 +63,21 @@ export ACCOUNT_KEY=$(az cosmosdb list-keys --resource-group $RES_GROUP --name $A Once you've populated the `ACCOUNT_URI` and `ACCOUNT_KEY` environment variables, you can create the [CosmosClient][ref_cosmosclient]. ```Python -from azure.cosmos import HTTPFailure, CosmosClient, Container, Database, PartitionKey +from azure.cosmos import CosmosClient, PartitionKey, errors import os url = os.environ['ACCOUNT_URI'] key = os.environ['ACCOUNT_KEY'] -client = CosmosClient(url, auth = { - 'masterKey': key -}) +client = CosmosClient(url, credential=key) ``` ## Usage Once you've initialized a [CosmosClient][ref_cosmosclient], you can interact with the primary resource types in Cosmos DB: -* [Database][ref_database]: A Cosmos DB account can contain multiple databases. When you create a database, you specify the API you'd like to use when interacting with its documents: SQL, MongoDB, Gremlin, Cassandra, or Azure Table. Use the [Database][ref_database] object to manage its containers. +* [Database][ref_database]: A Cosmos DB account can contain multiple databases. When you create a database, you specify the API you'd like to use when interacting with its documents: SQL, MongoDB, Gremlin, Cassandra, or Azure Table. Use the [DatabaseProxy][ref_database] object to manage its containers. -* [Container][ref_container]: A container is a collection of JSON documents. You create (insert), read, update, and delete items in a container by using methods on the [Container][ref_container] object. +* [Container][ref_container]: A container is a collection of JSON documents. You create (insert), read, update, and delete items in a container by using methods on the [ContainerProxy][ref_container] object. * [Item][ref_item]: An Item is the dictionary-like representation of a JSON document stored in a container. Each Item you add to a container must include an `id` key with a value that uniquely identifies the item within the container. @@ -106,9 +104,7 @@ After authenticating your [CosmosClient][ref_cosmosclient], you can work with an database_name = 'testDatabase' try: database = client.create_database(database_name) -except HTTPFailure as e: - if e.status_code != 409: - raise +except errors.CosmosResourceExistsError: database = client.get_database_client(database_name) ``` @@ -120,13 +116,13 @@ This example creates a container with default settings. If a container with the container_name = 'products' try: container = database.create_container(id=container_name, partition_key=PartitionKey(path="/productName")) -except HTTPFailure as e: - if e.status_code != 409: - raise +except errors.CosmosResourceExistsError: container = database.get_container_client(container_name) +except errors.CosmosHttpResponseError: + raise ``` -The preceding snippet also handles the [HTTPFailure][ref_httpfailure] exception if the container creation failed. For more information on error handling and troubleshooting, see the [Troubleshooting](#troubleshooting) section. +The preceding snippet also handles the [CosmosHttpResponseError][ref_httpfailure] exception if the container creation fails. For more information on error handling and troubleshooting, see the [Troubleshooting](#troubleshooting) section. 
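A small sketch (not part of the diff) of how the old status-code checks translate, continuing from the snippet above; it assumes `CosmosHttpResponseError` still exposes a `status_code` attribute via its `azure.core` base class, which is not shown in this diff.

```Python
try:
    container = database.create_container(
        id=container_name, partition_key=PartitionKey(path="/productName")
    )
except errors.CosmosResourceExistsError:
    # 409: a container with this ID already exists
    container = database.get_container_client(container_name)
except errors.CosmosHttpResponseError as e:
    # Other service errors still carry the HTTP status code (assumed attribute).
    print("Container creation failed with status {}".format(e.status_code))
    raise
```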
### Get an existing container @@ -139,7 +135,7 @@ container = database.get_container_client(container_name) ### Insert data -To insert items into a container, pass a dictionary containing your data to [Container.upsert_item][ref_container_upsert_item]. Each item you add to a container must include an `id` key with a value that uniquely identifies the item within the container. +To insert items into a container, pass a dictionary containing your data to [ContainerProxy.upsert_item][ref_container_upsert_item]. Each item you add to a container must include an `id` key with a value that uniquely identifies the item within the container. This example inserts several items into the container, each with a unique `id`: @@ -158,7 +154,7 @@ for i in range(1, 10): ### Delete data -To delete items from a container, use [Container.delete_item][ref_container_delete_item]. The SQL API in Cosmos DB does not support the SQL `DELETE` statement. +To delete items from a container, use [ContainerProxy.delete_item][ref_container_delete_item]. The SQL API in Cosmos DB does not support the SQL `DELETE` statement. ```Python for item in container.query_items(query='SELECT * FROM products p WHERE p.productModel = "DISCONTINUED"', @@ -168,7 +164,7 @@ for item in container.query_items(query='SELECT * FROM products p WHERE p.produc ### Query the database -A Cosmos DB SQL API database supports querying the items in a container with [Container.query_items][ref_container_query_items] using SQL-like syntax. +A Cosmos DB SQL API database supports querying the items in a container with [ContainerProxy.query_items][ref_container_query_items] using SQL-like syntax. This example queries a container for items with a specific `id`: @@ -186,7 +182,7 @@ for item in container.query_items( > NOTE: Although you can specify any value for the container name in the `FROM` clause, we recommend you use the container name for consistency. -Perform parameterized queries by passing a dictionary containing the parameters and their values to [Container.query_items][ref_container_query_items]: +Perform parameterized queries by passing a dictionary containing the parameters and their values to [ContainerProxy.query_items][ref_container_query_items]: ```Python discontinued_items = container.query_items( @@ -243,13 +239,11 @@ For example, if you try to create a container using an ID (name) that's already ```Python try: database.create_container(id=container_name, partition_key=PartitionKey(path="/productName") -except HTTPFailure as e: - if e.status_code == 409: - print("""Error creating container. +except errors.CosmosResourceExistsError: + print("""Error creating container HTTP status code 409: The ID (name) provided for the container is already in use. 
The container name must be unique within the database.""") - else: - raise + ``` ## More sample code @@ -285,7 +279,7 @@ For more extensive documentation on the Cosmos DB service, see the [Azure Cosmos [ref_cosmosclient_create_database]: http://cosmosproto.westus.azurecontainer.io/#azure.cosmos.CosmosClient.create_database [ref_cosmosclient]: http://cosmosproto.westus.azurecontainer.io/#azure.cosmos.CosmosClient [ref_database]: http://cosmosproto.westus.azurecontainer.io/#azure.cosmos.Database -[ref_httpfailure]: https://docs.microsoft.com/python/api/azure-cosmos/azure.cosmos.errors.httpfailure +[ref_httpfailure]: https://docs.microsoft.com/python/api/azure-cosmos/azure.cosmos.errors.CosmosHttpResponseError [ref_item]: http://cosmosproto.westus.azurecontainer.io/#azure.cosmos.Item [sample_database_mgmt]: https://github.com/binderjoe/cosmos-python-prototype/blob/master/examples/databasemanagementsample.py [sample_document_mgmt]: https://github.com/binderjoe/cosmos-python-prototype/blob/master/examples/documentmanagementsample.py diff --git a/sdk/cosmos/azure-cosmos/azure/__init__.py b/sdk/cosmos/azure-cosmos/azure/__init__.py index 8db66d3d0f0f..d55ccad1f573 100644 --- a/sdk/cosmos/azure-cosmos/azure/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py index 07f3ca79fb93..93920f280f41 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py @@ -19,9 +19,11 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-from .container import Container +from .container import ContainerProxy from .cosmos_client import CosmosClient -from .database import Database +from .database import DatabaseProxy +from .user import UserProxy +from .scripts import ScriptsProxy from .documents import ( ConsistencyLevel, DataType, @@ -35,18 +37,16 @@ ) from .partition_key import PartitionKey from .permission import Permission -from .scripts import Scripts -from .user import User from .version import VERSION __all__ = ( - "Container", "CosmosClient", - "Database", + "DatabaseProxy", + "ContainerProxy", "PartitionKey", "Permission", - "Scripts", - "User", + "ScriptsProxy", + "UserProxy", "ConsistencyLevel", "DataType", "IndexKind", @@ -57,4 +57,4 @@ "TriggerOperation", "TriggerType", ) -__version__ = VERSION \ No newline at end of file +__version__ = VERSION diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index 148e73a3a16c..265d108e178b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -27,6 +27,7 @@ import json import uuid import binascii +from typing import Dict, Any import six from six.moves.urllib.parse import quote as urllib_quote @@ -39,6 +40,45 @@ # pylint: disable=protected-access +_COMMON_OPTIONS = { + 'initial_headers': 'initialHeaders', + 'pre_trigger_include': 'preTriggerInclude', + 'post_trigger_include': 'postTriggerInclude', + 'max_item_count': 'maxItemCount', + 'access_condition': 'accessCondition', + 'indexing_directive': 'indexingDirective', + 'consistency_level': 'consistencyLevel', + 'session_token': 'sessionToken', + 'enable_scan_in_query': 'enableScanInQuery', + 'resource_token_expiry_seconds': 'resourceTokenExpirySeconds', + 'offer_type': 'offerType', + 'offer_throughput': 'offerThroughput', + 'partition_key': 'partitionKey', + 'enable_cross_partition_query': 'enableCrossPartitionQuery', + 'populate_query_metrics': 'populateQueryMetrics', + 'enable_script_logging': 'enableScriptLogging', + 'offer_enable_ru_per_minute_throughput': 'offerEnableRUPerMinuteThroughput', + 'disable_ru_per_minute_usage': 'disableRUPerMinuteUsage', + 'change_feed': 'changeFeed', + 'continuation': 'continuation', + 'is_start_from_beginning': 'isStartFromBeginning', + 'populate_partition_key_range_statistics': 'populatePartitionKeyRangeStatistics', + 'populate_quota_info': 'populateQuotaInfo' +} + +def build_options(kwargs): + # type: (Dict[str, Any]) -> Dict[str, Any] + options = kwargs.pop('request_options', kwargs.pop('feed_options', {})) + for key, value in _COMMON_OPTIONS.items(): + if key in kwargs: + options[value] = kwargs.pop(key) + + if 'if_match' in kwargs: + options['accessCondition'] = {'type': 'IfMatch', 'condition': kwargs.pop('if_match')} + if 'if_none_match' in kwargs: + options['accessCondition'] = {'type': 'IfNoneMatch', 'condition': kwargs.pop('if_none_match')} + return options + def GetHeaders( # pylint: disable=too-many-statements,too-many-branches cosmos_client_connection, diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py index 23c5c4c432d7..1fa3d3db9934 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_cosmos_client_connection.py @@ -24,22 +24,22 @@ """Document client class for the Azure Cosmos database service. 
""" -import platform - -import requests +from typing import Dict, Any, Optional import six -from azure.core import PipelineClient -from azure.core.pipeline.policies import ( +from azure.core.paging import ItemPaged # type: ignore +from azure.core import PipelineClient # type: ignore +from azure.core.pipeline.policies import ( # type: ignore ContentDecodePolicy, HeadersPolicy, UserAgentPolicy, NetworkTraceLoggingPolicy, CustomHookPolicy, ProxyPolicy) -from azure.core.pipeline.policies.distributed_tracing import DistributedTracingPolicy +from azure.core.pipeline.policies.distributed_tracing import DistributedTracingPolicy # type: ignore from . import _base as base from . import documents +from .documents import ConnectionPolicy from . import _constants as constants from . import http_constants from . import _query_iterable as query_iterable @@ -51,7 +51,6 @@ from . import _session from . import _utils from .partition_key import _Undefined, _Empty -from .version import VERSION # pylint: disable=protected-access @@ -81,8 +80,14 @@ class _QueryCompatibilityMode: _DefaultStringRangePrecision = -1 def __init__( - self, url_connection, auth, connection_policy=None, consistency_level=documents.ConsistencyLevel.Session + self, + url_connection, # type: str + auth, # type: Dict[str, Any] + connection_policy=None, # type: Optional[ConnectionPolicy] + consistency_level=documents.ConsistencyLevel.Session, # type: str + **kwargs # type: Any ): + # type: (...) -> None """ :param str url_connection: The URL for connecting to the DB server. @@ -112,16 +117,15 @@ def __init__( id_ = resource_parts[-1] self.resource_tokens[id_] = permission_feed["_token"] - self.connection_policy = connection_policy or documents.ConnectionPolicy() + self.connection_policy = connection_policy or ConnectionPolicy() - self.partition_resolvers = {} + self.partition_resolvers = {} # type: Dict[str, Any] - self.partition_key_definition_cache = {} + self.partition_key_definition_cache = {} # type: Dict[str, Any] self.default_headers = { http_constants.HttpHeaders.CacheControl: "no-cache", http_constants.HttpHeaders.Version: http_constants.Versions.CurrentVersion, - http_constants.HttpHeaders.UserAgent: _utils.get_user_agent(), # For single partition query with aggregate functions we would try to accumulate the results on the SDK. # We need to set continuation as not expected. 
http_constants.HttpHeaders.IsContinuationExpected: False, @@ -139,30 +143,26 @@ def __init__( # via setter self.session = _session.Session(self.url_connection) else: - self.session = None + self.session = None # type: ignore self._useMultipleWriteLocations = False self._global_endpoint_manager = global_endpoint_manager._GlobalEndpointManager(self) - proxies = {} + proxies = kwargs.pop('proxies', {}) if self.connection_policy.ProxyConfiguration and self.connection_policy.ProxyConfiguration.Host: - host = connection_policy.ProxyConfiguration.Host + host = self.connection_policy.ProxyConfiguration.Host url = six.moves.urllib.parse.urlparse(host) - proxy = host if url.port else host + ":" + str(connection_policy.ProxyConfiguration.Port) - proxies = {url.scheme : proxy} - user_agent = "azsdk-python-cosmos/{} Python/{} ({})".format( - VERSION, - platform.python_version(), - platform.platform()) + proxy = host if url.port else host + ":" + str(self.connection_policy.ProxyConfiguration.Port) + proxies.update({url.scheme : proxy}) policies = [ - HeadersPolicy(), + HeadersPolicy(**kwargs), ProxyPolicy(proxies=proxies), - UserAgentPolicy(base_user_agent=user_agent), + UserAgentPolicy(base_user_agent=_utils.get_user_agent(), **kwargs), ContentDecodePolicy(), - CustomHookPolicy(), + CustomHookPolicy(**kwargs), DistributedTracingPolicy(), - NetworkTraceLoggingPolicy(), + NetworkTraceLoggingPolicy(**kwargs), ] self.pipeline_client = PipelineClient(url_connection, "empty-config", policies=policies) @@ -235,7 +235,7 @@ def GetPartitionResolver(self, database_link): return self.partition_resolvers.get(base.TrimBeginningAndEndingSlashes(database_link)) - def CreateDatabase(self, database, options=None): + def CreateDatabase(self, database, options=None, **kwargs): """Creates a database. :param dict database: @@ -253,9 +253,9 @@ def CreateDatabase(self, database, options=None): CosmosClientConnection.__ValidateResource(database) path = "/dbs" - return self.Create(database, path, "dbs", None, None, options) + return self.Create(database, path, "dbs", None, None, options, **kwargs) - def ReadDatabase(self, database_link, options=None): + def ReadDatabase(self, database_link, options=None, **kwargs): """Reads a database. :param str database_link: @@ -273,9 +273,9 @@ def ReadDatabase(self, database_link, options=None): path = base.GetPathFromLink(database_link) database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.Read(path, "dbs", database_id, None, options) + return self.Read(path, "dbs", database_id, None, options, **kwargs) - def ReadDatabases(self, options=None): + def ReadDatabases(self, options=None, **kwargs): """Reads all databases. :param dict options: @@ -290,9 +290,9 @@ def ReadDatabases(self, options=None): if options is None: options = {} - return self.QueryDatabases(None, options) + return self.QueryDatabases(None, options, **kwargs) - def QueryDatabases(self, query, options=None): + def QueryDatabases(self, query, options=None, **kwargs): """Queries databases. 
:param (str or dict) query: @@ -309,13 +309,18 @@ def QueryDatabases(self, query, options=None): def fetch_fn(options): return ( - self.__QueryFeed("/dbs", "dbs", "", lambda r: r["Databases"], lambda _, b: b, query, options), + self.__QueryFeed( + "/dbs", "dbs", "", lambda r: r["Databases"], + lambda _, b: b, query, options, **kwargs + ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def ReadContainers(self, database_link, options=None): + def ReadContainers(self, database_link, options=None, **kwargs): """Reads all collections in a database. :param str database_link: @@ -331,9 +336,9 @@ def ReadContainers(self, database_link, options=None): if options is None: options = {} - return self.QueryContainers(database_link, None, options) + return self.QueryContainers(database_link, None, options, **kwargs) - def QueryContainers(self, database_link, query, options=None): + def QueryContainers(self, database_link, query, options=None, **kwargs): """Queries collections in a database. :param str database_link: @@ -356,14 +361,17 @@ def QueryContainers(self, database_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "colls", database_id, lambda r: r["DocumentCollections"], lambda _, body: body, query, options + path, "colls", database_id, lambda r: r["DocumentCollections"], + lambda _, body: body, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateContainer(self, database_link, collection, options=None): + def CreateContainer(self, database_link, collection, options=None, **kwargs): """Creates a collection in a database. :param str database_link: @@ -383,9 +391,9 @@ def CreateContainer(self, database_link, collection, options=None): CosmosClientConnection.__ValidateResource(collection) path = base.GetPathFromLink(database_link, "colls") database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.Create(collection, path, "colls", database_id, None, options) + return self.Create(collection, path, "colls", database_id, None, options, **kwargs) - def ReplaceContainer(self, collection_link, collection, options=None): + def ReplaceContainer(self, collection_link, collection, options=None, **kwargs): """Replaces a collection and return it. :param str collection_link: @@ -407,9 +415,9 @@ def ReplaceContainer(self, collection_link, collection, options=None): CosmosClientConnection.__ValidateResource(collection) path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.Replace(collection, path, "colls", collection_id, None, options) + return self.Replace(collection, path, "colls", collection_id, None, options, **kwargs) - def ReadContainer(self, collection_link, options=None): + def ReadContainer(self, collection_link, options=None, **kwargs): """Reads a collection. 
:param str collection_link: @@ -428,9 +436,9 @@ def ReadContainer(self, collection_link, options=None): path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.Read(path, "colls", collection_id, None, options) + return self.Read(path, "colls", collection_id, None, options, **kwargs) - def CreateUser(self, database_link, user, options=None): + def CreateUser(self, database_link, user, options=None, **kwargs): """Creates a user. :param str database_link: @@ -450,9 +458,9 @@ def CreateUser(self, database_link, user, options=None): options = {} database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) - return self.Create(user, path, "users", database_id, None, options) + return self.Create(user, path, "users", database_id, None, options, **kwargs) - def UpsertUser(self, database_link, user, options=None): + def UpsertUser(self, database_link, user, options=None, **kwargs): """Upserts a user. :param str database_link: @@ -470,7 +478,7 @@ def UpsertUser(self, database_link, user, options=None): options = {} database_id, path = self._GetDatabaseIdWithPathForUser(database_link, user) - return self.Upsert(user, path, "users", database_id, None, options) + return self.Upsert(user, path, "users", database_id, None, options, **kwargs) def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(user) @@ -478,7 +486,7 @@ def _GetDatabaseIdWithPathForUser(self, database_link, user): # pylint: disable database_id = base.GetResourceIdOrFullNameFromLink(database_link) return database_id, path - def ReadUser(self, user_link, options=None): + def ReadUser(self, user_link, options=None, **kwargs): """Reads a user. :param str user_link: @@ -497,9 +505,9 @@ def ReadUser(self, user_link, options=None): path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.Read(path, "users", user_id, None, options) + return self.Read(path, "users", user_id, None, options, **kwargs) - def ReadUsers(self, database_link, options=None): + def ReadUsers(self, database_link, options=None, **kwargs): """Reads all users in a database. :params str database_link: @@ -515,9 +523,9 @@ def ReadUsers(self, database_link, options=None): if options is None: options = {} - return self.QueryUsers(database_link, None, options) + return self.QueryUsers(database_link, None, options, **kwargs) - def QueryUsers(self, database_link, query, options=None): + def QueryUsers(self, database_link, query, options=None, **kwargs): """Queries users in a database. :param str database_link: @@ -540,13 +548,18 @@ def QueryUsers(self, database_link, query, options=None): def fetch_fn(options): return ( - self.__QueryFeed(path, "users", database_id, lambda r: r["Users"], lambda _, b: b, query, options), + self.__QueryFeed( + path, "users", database_id, lambda r: r["Users"], + lambda _, b: b, query, options, **kwargs + ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def DeleteDatabase(self, database_link, options=None): + def DeleteDatabase(self, database_link, options=None, **kwargs): """Deletes a database. 
:param str database_link: @@ -565,9 +578,9 @@ def DeleteDatabase(self, database_link, options=None): path = base.GetPathFromLink(database_link) database_id = base.GetResourceIdOrFullNameFromLink(database_link) - return self.DeleteResource(path, "dbs", database_id, None, options) + return self.DeleteResource(path, "dbs", database_id, None, options, **kwargs) - def CreatePermission(self, user_link, permission, options=None): + def CreatePermission(self, user_link, permission, options=None, **kwargs): """Creates a permission for a user. :param str user_link: @@ -587,9 +600,9 @@ def CreatePermission(self, user_link, permission, options=None): options = {} path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) - return self.Create(permission, path, "permissions", user_id, None, options) + return self.Create(permission, path, "permissions", user_id, None, options, **kwargs) - def UpsertPermission(self, user_link, permission, options=None): + def UpsertPermission(self, user_link, permission, options=None, **kwargs): """Upserts a permission for a user. :param str user_link: @@ -609,7 +622,7 @@ def UpsertPermission(self, user_link, permission, options=None): options = {} path, user_id = self._GetUserIdWithPathForPermission(permission, user_link) - return self.Upsert(permission, path, "permissions", user_id, None, options) + return self.Upsert(permission, path, "permissions", user_id, None, options, **kwargs) def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(permission) @@ -617,7 +630,7 @@ def _GetUserIdWithPathForPermission(self, permission, user_link): # pylint: dis user_id = base.GetResourceIdOrFullNameFromLink(user_link) return path, user_id - def ReadPermission(self, permission_link, options=None): + def ReadPermission(self, permission_link, options=None, **kwargs): """Reads a permission. :param str permission_link: @@ -636,9 +649,9 @@ def ReadPermission(self, permission_link, options=None): path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.Read(path, "permissions", permission_id, None, options) + return self.Read(path, "permissions", permission_id, None, options, **kwargs) - def ReadPermissions(self, user_link, options=None): + def ReadPermissions(self, user_link, options=None, **kwargs): """Reads all permissions for a user. :param str user_link: @@ -655,9 +668,9 @@ def ReadPermissions(self, user_link, options=None): if options is None: options = {} - return self.QueryPermissions(user_link, None, options) + return self.QueryPermissions(user_link, None, options, **kwargs) - def QueryPermissions(self, user_link, query, options=None): + def QueryPermissions(self, user_link, query, options=None, **kwargs): """Queries permissions for a user. 
:param str user_link: @@ -681,14 +694,16 @@ def QueryPermissions(self, user_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options + path, "permissions", user_id, lambda r: r["Permissions"], lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def ReplaceUser(self, user_link, user, options=None): + def ReplaceUser(self, user_link, user, options=None, **kwargs): """Replaces a user and return it. :param str user_link: @@ -709,9 +724,9 @@ def ReplaceUser(self, user_link, user, options=None): CosmosClientConnection.__ValidateResource(user) path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.Replace(user, path, "users", user_id, None, options) + return self.Replace(user, path, "users", user_id, None, options, **kwargs) - def DeleteUser(self, user_link, options=None): + def DeleteUser(self, user_link, options=None, **kwargs): """Deletes a user. :param str user_link: @@ -730,9 +745,9 @@ def DeleteUser(self, user_link, options=None): path = base.GetPathFromLink(user_link) user_id = base.GetResourceIdOrFullNameFromLink(user_link) - return self.DeleteResource(path, "users", user_id, None, options) + return self.DeleteResource(path, "users", user_id, None, options, **kwargs) - def ReplacePermission(self, permission_link, permission, options=None): + def ReplacePermission(self, permission_link, permission, options=None, **kwargs): """Replaces a permission and return it. :param str permission_link: @@ -753,9 +768,9 @@ def ReplacePermission(self, permission_link, permission, options=None): CosmosClientConnection.__ValidateResource(permission) path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.Replace(permission, path, "permissions", permission_id, None, options) + return self.Replace(permission, path, "permissions", permission_id, None, options, **kwargs) - def DeletePermission(self, permission_link, options=None): + def DeletePermission(self, permission_link, options=None, **kwargs): """Deletes a permission. :param str permission_link: @@ -774,9 +789,9 @@ def DeletePermission(self, permission_link, options=None): path = base.GetPathFromLink(permission_link) permission_id = base.GetResourceIdOrFullNameFromLink(permission_link) - return self.DeleteResource(path, "permissions", permission_id, None, options) + return self.DeleteResource(path, "permissions", permission_id, None, options, **kwargs) - def ReadItems(self, collection_link, feed_options=None, response_hook=None): + def ReadItems(self, collection_link, feed_options=None, response_hook=None, **kwargs): """Reads all documents in a collection. 
:param str collection_link: @@ -792,12 +807,20 @@ def ReadItems(self, collection_link, feed_options=None, response_hook=None): if feed_options is None: feed_options = {} - return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook) + return self.QueryItems(collection_link, None, feed_options, response_hook=response_hook, **kwargs) - def QueryItems(self, database_or_Container_link, query, options=None, partition_key=None, response_hook=None): + def QueryItems( + self, + database_or_container_link, + query, + options=None, + partition_key=None, + response_hook=None, + **kwargs + ): """Queries documents in a collection. - :param str database_or_Container_link: + :param str database_or_container_link: The link to the database when using partitioning, otherwise link to the document collection. :param (str or dict) query: :param dict options: @@ -813,20 +836,23 @@ def QueryItems(self, database_or_Container_link, query, options=None, partition_ query_iterable.QueryIterable """ - database_or_Container_link = base.TrimBeginningAndEndingSlashes(database_or_Container_link) + database_or_container_link = base.TrimBeginningAndEndingSlashes(database_or_container_link) if options is None: options = {} - if base.IsDatabaseLink(database_or_Container_link): - # Python doesn't have a good way of specifying an overloaded constructor, - # and this is how it's generally overloaded constructors are specified (by - # calling a @classmethod) and returning the 'self' instance - return query_iterable.QueryIterable.PartitioningQueryIterable( - self, query, options, database_or_Container_link, partition_key + if base.IsDatabaseLink(database_or_container_link): + return ItemPaged( + self, + query, + options, + database_link=database_or_container_link, + partition_key=partition_key, + page_iterator_class=query_iterable.QueryIterable ) - path = base.GetPathFromLink(database_or_Container_link, "docs") - collection_id = base.GetResourceIdOrFullNameFromLink(database_or_Container_link) + + path = base.GetPathFromLink(database_or_container_link, "docs") + collection_id = base.GetResourceIdOrFullNameFromLink(database_or_container_link) def fetch_fn(options): return ( @@ -839,13 +865,21 @@ def fetch_fn(options): query, options, response_hook=response_hook, + **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn, database_or_Container_link) + return ItemPaged( + self, + query, + options, + fetch_function=fetch_fn, + collection_link=database_or_container_link, + page_iterator_class=query_iterable.QueryIterable + ) - def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None): + def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None, **kwargs): """Queries documents change feed in a collection. :param str collection_link: @@ -868,11 +902,11 @@ def QueryItemsChangeFeed(self, collection_link, options=None, response_hook=None partition_key_range_id = options["partitionKeyRangeId"] return self._QueryChangeFeed( - collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook + collection_link, "Documents", options, partition_key_range_id, response_hook=response_hook, **kwargs ) def _QueryChangeFeed( - self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None + self, collection_link, resource_type, options=None, partition_key_range_id=None, response_hook=None, **kwargs ): """Queries change feed of a resource in a collection. 
@@ -919,13 +953,21 @@ def fetch_fn(options): options, partition_key_range_id, response_hook=response_hook, + **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, None, options, fetch_fn, collection_link) + return ItemPaged( + self, + None, + options, + fetch_function=fetch_fn, + collection_link=collection_link, + page_iterator_class=query_iterable.QueryIterable + ) - def _ReadPartitionKeyRanges(self, collection_link, feed_options=None): + def _ReadPartitionKeyRanges(self, collection_link, feed_options=None, **kwargs): """Reads Partition Key Ranges. :param str collection_link: @@ -941,9 +983,9 @@ def _ReadPartitionKeyRanges(self, collection_link, feed_options=None): if feed_options is None: feed_options = {} - return self._QueryPartitionKeyRanges(collection_link, None, feed_options) + return self._QueryPartitionKeyRanges(collection_link, None, feed_options, **kwargs) - def _QueryPartitionKeyRanges(self, collection_link, query, options=None): + def _QueryPartitionKeyRanges(self, collection_link, query, options=None, **kwargs): """Queries Partition Key Ranges in a collection. :param str collection_link: @@ -967,17 +1009,20 @@ def _QueryPartitionKeyRanges(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"], lambda _, b: b, query, options + path, "pkranges", collection_id, lambda r: r["PartitionKeyRanges"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateItem(self, database_or_Container_link, document, options=None): + def CreateItem(self, database_or_container_link, document, options=None, **kwargs): """Creates a document in a collection. - :param str database_or_Container_link: + :param str database_or_container_link: The link to the database when using partitioning, otherwise link to the document collection. :param dict document: The Azure Cosmos document to create. @@ -1005,18 +1050,18 @@ def CreateItem(self, database_or_Container_link, document, options=None): # We check the link to be document collection link since it can be database # link in case of client side partitioning - if base.IsItemContainerLink(database_or_Container_link): - options = self._AddPartitionKey(database_or_Container_link, document, options) + if base.IsItemContainerLink(database_or_container_link): + options = self._AddPartitionKey(database_or_container_link, document, options) collection_id, document, path = self._GetContainerIdWithPathForItem( - database_or_Container_link, document, options + database_or_container_link, document, options ) - return self.Create(document, path, "docs", collection_id, None, options) + return self.Create(document, path, "docs", collection_id, None, options, **kwargs) - def UpsertItem(self, database_or_Container_link, document, options=None): + def UpsertItem(self, database_or_container_link, document, options=None, **kwargs): """Upserts a document in a collection. - :param str database_or_Container_link: + :param str database_or_container_link: The link to the database when using partitioning, otherwise link to the document collection. :param dict document: The Azure Cosmos document to upsert. 
@@ -1044,13 +1089,13 @@ def UpsertItem(self, database_or_Container_link, document, options=None): # We check the link to be document collection link since it can be database # link in case of client side partitioning - if base.IsItemContainerLink(database_or_Container_link): - options = self._AddPartitionKey(database_or_Container_link, document, options) + if base.IsItemContainerLink(database_or_container_link): + options = self._AddPartitionKey(database_or_container_link, document, options) collection_id, document, path = self._GetContainerIdWithPathForItem( - database_or_Container_link, document, options + database_or_container_link, document, options ) - return self.Upsert(document, path, "docs", collection_id, None, options) + return self.Upsert(document, path, "docs", collection_id, None, options, **kwargs) PartitionResolverErrorMessage = ( "Couldn't find any partition resolvers for the database link provided. " @@ -1060,10 +1105,10 @@ def UpsertItem(self, database_or_Container_link, document, options=None): ) # Gets the collection id and path for the document - def _GetContainerIdWithPathForItem(self, database_or_Container_link, document, options): + def _GetContainerIdWithPathForItem(self, database_or_container_link, document, options): - if not database_or_Container_link: - raise ValueError("database_or_Container_link is None or empty.") + if not database_or_container_link: + raise ValueError("database_or_container_link is None or empty.") if document is None: raise ValueError("document is None.") @@ -1073,10 +1118,10 @@ def _GetContainerIdWithPathForItem(self, database_or_Container_link, document, o if not document.get("id") and not options.get("disableAutomaticIdGeneration"): document["id"] = base.GenerateGuidId() - collection_link = database_or_Container_link + collection_link = database_or_container_link - if base.IsDatabaseLink(database_or_Container_link): - partition_resolver = self.GetPartitionResolver(database_or_Container_link) + if base.IsDatabaseLink(database_or_container_link): + partition_resolver = self.GetPartitionResolver(database_or_container_link) if partition_resolver is not None: collection_link = partition_resolver.ResolveForCreate(document) @@ -1087,7 +1132,7 @@ def _GetContainerIdWithPathForItem(self, database_or_Container_link, document, o collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, document, path - def ReadItem(self, document_link, options=None): + def ReadItem(self, document_link, options=None, **kwargs): """Reads a document. :param str document_link: @@ -1106,9 +1151,9 @@ def ReadItem(self, document_link, options=None): path = base.GetPathFromLink(document_link) document_id = base.GetResourceIdOrFullNameFromLink(document_link) - return self.Read(path, "docs", document_id, None, options) + return self.Read(path, "docs", document_id, None, options, **kwargs) - def ReadTriggers(self, collection_link, options=None): + def ReadTriggers(self, collection_link, options=None, **kwargs): """Reads all triggers in a collection. :param str collection_link: @@ -1125,9 +1170,9 @@ def ReadTriggers(self, collection_link, options=None): if options is None: options = {} - return self.QueryTriggers(collection_link, None, options) + return self.QueryTriggers(collection_link, None, options, **kwargs) - def QueryTriggers(self, collection_link, query, options=None): + def QueryTriggers(self, collection_link, query, options=None, **kwargs): """Queries triggers in a collection. 
:param str collection_link: @@ -1151,14 +1196,16 @@ def QueryTriggers(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options + path, "triggers", collection_id, lambda r: r["Triggers"], lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateTrigger(self, collection_link, trigger, options=None): + def CreateTrigger(self, collection_link, trigger, options=None, **kwargs): """Creates a trigger in a collection. :param str collection_link: @@ -1177,9 +1224,9 @@ def CreateTrigger(self, collection_link, trigger, options=None): options = {} collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) - return self.Create(trigger, path, "triggers", collection_id, None, options) + return self.Create(trigger, path, "triggers", collection_id, None, options, **kwargs) - def UpsertTrigger(self, collection_link, trigger, options=None): + def UpsertTrigger(self, collection_link, trigger, options=None, **kwargs): """Upserts a trigger in a collection. :param str collection_link: @@ -1198,7 +1245,7 @@ def UpsertTrigger(self, collection_link, trigger, options=None): options = {} collection_id, path, trigger = self._GetContainerIdWithPathForTrigger(collection_link, trigger) - return self.Upsert(trigger, path, "triggers", collection_id, None, options) + return self.Upsert(trigger, path, "triggers", collection_id, None, options, **kwargs) def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(trigger) @@ -1212,7 +1259,7 @@ def _GetContainerIdWithPathForTrigger(self, collection_link, trigger): # pylint collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, path, trigger - def ReadTrigger(self, trigger_link, options=None): + def ReadTrigger(self, trigger_link, options=None, **kwargs): """Reads a trigger. :param str trigger_link: @@ -1231,9 +1278,9 @@ def ReadTrigger(self, trigger_link, options=None): path = base.GetPathFromLink(trigger_link) trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.Read(path, "triggers", trigger_id, None, options) + return self.Read(path, "triggers", trigger_id, None, options, **kwargs) - def ReadUserDefinedFunctions(self, collection_link, options=None): + def ReadUserDefinedFunctions(self, collection_link, options=None, **kwargs): """Reads all user defined functions in a collection. :param str collection_link: @@ -1250,9 +1297,9 @@ def ReadUserDefinedFunctions(self, collection_link, options=None): if options is None: options = {} - return self.QueryUserDefinedFunctions(collection_link, None, options) + return self.QueryUserDefinedFunctions(collection_link, None, options, **kwargs) - def QueryUserDefinedFunctions(self, collection_link, query, options=None): + def QueryUserDefinedFunctions(self, collection_link, query, options=None, **kwargs): """Queries user defined functions in a collection. 
:param str collection_link: @@ -1276,14 +1323,17 @@ def QueryUserDefinedFunctions(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"], lambda _, b: b, query, options + path, "udfs", collection_id, lambda r: r["UserDefinedFunctions"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateUserDefinedFunction(self, collection_link, udf, options=None): + def CreateUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): """Creates a user defined function in a collection. :param str collection_link: @@ -1302,9 +1352,9 @@ def CreateUserDefinedFunction(self, collection_link, udf, options=None): options = {} collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) - return self.Create(udf, path, "udfs", collection_id, None, options) + return self.Create(udf, path, "udfs", collection_id, None, options, **kwargs) - def UpsertUserDefinedFunction(self, collection_link, udf, options=None): + def UpsertUserDefinedFunction(self, collection_link, udf, options=None, **kwargs): """Upserts a user defined function in a collection. :param str collection_link: @@ -1323,7 +1373,7 @@ def UpsertUserDefinedFunction(self, collection_link, udf, options=None): options = {} collection_id, path, udf = self._GetContainerIdWithPathForUDF(collection_link, udf) - return self.Upsert(udf, path, "udfs", collection_id, None, options) + return self.Upsert(udf, path, "udfs", collection_id, None, options, **kwargs) def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(udf) @@ -1337,7 +1387,7 @@ def _GetContainerIdWithPathForUDF(self, collection_link, udf): # pylint: disabl collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, path, udf - def ReadUserDefinedFunction(self, udf_link, options=None): + def ReadUserDefinedFunction(self, udf_link, options=None, **kwargs): """Reads a user defined function. :param str udf_link: @@ -1356,9 +1406,9 @@ def ReadUserDefinedFunction(self, udf_link, options=None): path = base.GetPathFromLink(udf_link) udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.Read(path, "udfs", udf_id, None, options) + return self.Read(path, "udfs", udf_id, None, options, **kwargs) - def ReadStoredProcedures(self, collection_link, options=None): + def ReadStoredProcedures(self, collection_link, options=None, **kwargs): """Reads all store procedures in a collection. :param str collection_link: @@ -1375,9 +1425,9 @@ def ReadStoredProcedures(self, collection_link, options=None): if options is None: options = {} - return self.QueryStoredProcedures(collection_link, None, options) + return self.QueryStoredProcedures(collection_link, None, options, **kwargs) - def QueryStoredProcedures(self, collection_link, query, options=None): + def QueryStoredProcedures(self, collection_link, query, options=None, **kwargs): """Queries stored procedures in a collection. 
:param str collection_link: @@ -1401,14 +1451,17 @@ def QueryStoredProcedures(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "sprocs", collection_id, lambda r: r["StoredProcedures"], lambda _, b: b, query, options + path, "sprocs", collection_id, lambda r: r["StoredProcedures"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def CreateStoredProcedure(self, collection_link, sproc, options=None): + def CreateStoredProcedure(self, collection_link, sproc, options=None, **kwargs): """Creates a stored procedure in a collection. :param str collection_link: @@ -1427,9 +1480,9 @@ def CreateStoredProcedure(self, collection_link, sproc, options=None): options = {} collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) - return self.Create(sproc, path, "sprocs", collection_id, None, options) + return self.Create(sproc, path, "sprocs", collection_id, None, options, **kwargs) - def UpsertStoredProcedure(self, collection_link, sproc, options=None): + def UpsertStoredProcedure(self, collection_link, sproc, options=None, **kwargs): """Upserts a stored procedure in a collection. :param str collection_link: @@ -1448,7 +1501,7 @@ def UpsertStoredProcedure(self, collection_link, sproc, options=None): options = {} collection_id, path, sproc = self._GetContainerIdWithPathForSproc(collection_link, sproc) - return self.Upsert(sproc, path, "sprocs", collection_id, None, options) + return self.Upsert(sproc, path, "sprocs", collection_id, None, options, **kwargs) def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(sproc) @@ -1461,7 +1514,7 @@ def _GetContainerIdWithPathForSproc(self, collection_link, sproc): # pylint: di collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) return collection_id, path, sproc - def ReadStoredProcedure(self, sproc_link, options=None): + def ReadStoredProcedure(self, sproc_link, options=None, **kwargs): """Reads a stored procedure. :param str sproc_link: @@ -1480,9 +1533,9 @@ def ReadStoredProcedure(self, sproc_link, options=None): path = base.GetPathFromLink(sproc_link) sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.Read(path, "sprocs", sproc_id, None, options) + return self.Read(path, "sprocs", sproc_id, None, options, **kwargs) - def ReadConflicts(self, collection_link, feed_options=None): + def ReadConflicts(self, collection_link, feed_options=None, **kwargs): """Reads conflicts. :param str collection_link: @@ -1498,9 +1551,9 @@ def ReadConflicts(self, collection_link, feed_options=None): if feed_options is None: feed_options = {} - return self.QueryConflicts(collection_link, None, feed_options) + return self.QueryConflicts(collection_link, None, feed_options, **kwargs) - def QueryConflicts(self, collection_link, query, options=None): + def QueryConflicts(self, collection_link, query, options=None, **kwargs): """Queries conflicts in a collection. 
:param str collection_link: @@ -1524,14 +1577,17 @@ def QueryConflicts(self, collection_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "conflicts", collection_id, lambda r: r["Conflicts"], lambda _, b: b, query, options + path, "conflicts", collection_id, lambda r: r["Conflicts"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) - def ReadConflict(self, conflict_link, options=None): + def ReadConflict(self, conflict_link, options=None, **kwargs): """Reads a conflict. :param str conflict_link: @@ -1549,9 +1605,9 @@ def ReadConflict(self, conflict_link, options=None): path = base.GetPathFromLink(conflict_link) conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) - return self.Read(path, "conflicts", conflict_id, None, options) + return self.Read(path, "conflicts", conflict_id, None, options, **kwargs) - def DeleteContainer(self, collection_link, options=None): + def DeleteContainer(self, collection_link, options=None, **kwargs): """Deletes a collection. :param str collection_link: @@ -1570,9 +1626,9 @@ def DeleteContainer(self, collection_link, options=None): path = base.GetPathFromLink(collection_link) collection_id = base.GetResourceIdOrFullNameFromLink(collection_link) - return self.DeleteResource(path, "colls", collection_id, None, options) + return self.DeleteResource(path, "colls", collection_id, None, options, **kwargs) - def ReplaceItem(self, document_link, new_document, options=None): + def ReplaceItem(self, document_link, new_document, options=None, **kwargs): """Replaces a document and returns it. :param str document_link: @@ -1605,9 +1661,9 @@ def ReplaceItem(self, document_link, new_document, options=None): collection_link = base.GetItemContainerLink(document_link) options = self._AddPartitionKey(collection_link, new_document, options) - return self.Replace(new_document, path, "docs", document_id, None, options) + return self.Replace(new_document, path, "docs", document_id, None, options, **kwargs) - def DeleteItem(self, document_link, options=None): + def DeleteItem(self, document_link, options=None, **kwargs): """Deletes a document. :param str document_link: @@ -1626,9 +1682,9 @@ def DeleteItem(self, document_link, options=None): path = base.GetPathFromLink(document_link) document_id = base.GetResourceIdOrFullNameFromLink(document_link) - return self.DeleteResource(path, "docs", document_id, None, options) + return self.DeleteResource(path, "docs", document_id, None, options, **kwargs) - def CreateAttachment(self, document_link, attachment, options=None): + def CreateAttachment(self, document_link, attachment, options=None, **kwargs): """Creates an attachment in a document. :param str document_link: @@ -1648,9 +1704,9 @@ def CreateAttachment(self, document_link, attachment, options=None): options = {} document_id, path = self._GetItemIdWithPathForAttachment(attachment, document_link) - return self.Create(attachment, path, "attachments", document_id, None, options) + return self.Create(attachment, path, "attachments", document_id, None, options, **kwargs) - def UpsertAttachment(self, document_link, attachment, options=None): + def UpsertAttachment(self, document_link, attachment, options=None, **kwargs): """Upserts an attachment in a document. 
:param str document_link: @@ -1670,7 +1726,7 @@ def UpsertAttachment(self, document_link, attachment, options=None): options = {} document_id, path = self._GetItemIdWithPathForAttachment(attachment, document_link) - return self.Upsert(attachment, path, "attachments", document_id, None, options) + return self.Upsert(attachment, path, "attachments", document_id, None, options, **kwargs) def _GetItemIdWithPathForAttachment(self, attachment, document_link): # pylint: disable=no-self-use CosmosClientConnection.__ValidateResource(attachment) @@ -1678,7 +1734,7 @@ def _GetItemIdWithPathForAttachment(self, attachment, document_link): # pylint: document_id = base.GetResourceIdOrFullNameFromLink(document_link) return document_id, path - def CreateAttachmentAndUploadMedia(self, document_link, readable_stream, options=None): + def CreateAttachmentAndUploadMedia(self, document_link, readable_stream, options=None, **kwargs): """Creates an attachment and upload media. :param str document_link: @@ -1697,9 +1753,9 @@ def CreateAttachmentAndUploadMedia(self, document_link, readable_stream, options options = {} document_id, initial_headers, path = self._GetItemIdWithPathForAttachmentMedia(document_link, options) - return self.Create(readable_stream, path, "attachments", document_id, initial_headers, options) + return self.Create(readable_stream, path, "attachments", document_id, initial_headers, options, **kwargs) - def UpsertAttachmentAndUploadMedia(self, document_link, readable_stream, options=None): + def UpsertAttachmentAndUploadMedia(self, document_link, readable_stream, options=None, **kwargs): """Upserts an attachment and upload media. :param str document_link: @@ -1718,7 +1774,7 @@ def UpsertAttachmentAndUploadMedia(self, document_link, readable_stream, options options = {} document_id, initial_headers, path = self._GetItemIdWithPathForAttachmentMedia(document_link, options) - return self.Upsert(readable_stream, path, "attachments", document_id, initial_headers, options) + return self.Upsert(readable_stream, path, "attachments", document_id, initial_headers, options, **kwargs) def _GetItemIdWithPathForAttachmentMedia(self, document_link, options): initial_headers = dict(self.default_headers) @@ -1736,7 +1792,7 @@ def _GetItemIdWithPathForAttachmentMedia(self, document_link, options): document_id = base.GetResourceIdOrFullNameFromLink(document_link) return document_id, initial_headers, path - def ReadAttachment(self, attachment_link, options=None): + def ReadAttachment(self, attachment_link, options=None, **kwargs): """Reads an attachment. :param str attachment_link: @@ -1755,9 +1811,9 @@ def ReadAttachment(self, attachment_link, options=None): path = base.GetPathFromLink(attachment_link) attachment_id = base.GetResourceIdOrFullNameFromLink(attachment_link) - return self.Read(path, "attachments", attachment_id, None, options) + return self.Read(path, "attachments", attachment_id, None, options, **kwargs) - def ReadAttachments(self, document_link, options=None): + def ReadAttachments(self, document_link, options=None, **kwargs): """Reads all attachments in a document. 
:param str document_link: @@ -1774,9 +1830,9 @@ def ReadAttachments(self, document_link, options=None): if options is None: options = {} - return self.QueryAttachments(document_link, None, options) + return self.QueryAttachments(document_link, None, options, **kwargs) - def QueryAttachments(self, document_link, query, options=None): + def QueryAttachments(self, document_link, query, options=None, **kwargs): """Queries attachments in a document. :param str document_link: @@ -1800,12 +1856,15 @@ def QueryAttachments(self, document_link, query, options=None): def fetch_fn(options): return ( self.__QueryFeed( - path, "attachments", document_id, lambda r: r["Attachments"], lambda _, b: b, query, options + path, "attachments", document_id, lambda r: r["Attachments"], + lambda _, b: b, query, options, **kwargs ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) def ReadMedia(self, media_link, **kwargs): """Reads a media. @@ -1876,7 +1935,7 @@ def UpdateMedia(self, media_link, readable_stream, options=None, **kwargs): self._UpdateSessionIfRequired(headers, result, self.last_response_headers) return result - def ReplaceAttachment(self, attachment_link, attachment, options=None): + def ReplaceAttachment(self, attachment_link, attachment, options=None, **kwargs): """Replaces an attachment and returns it. :param str attachment_link: @@ -1897,9 +1956,9 @@ def ReplaceAttachment(self, attachment_link, attachment, options=None): CosmosClientConnection.__ValidateResource(attachment) path = base.GetPathFromLink(attachment_link) attachment_id = base.GetResourceIdOrFullNameFromLink(attachment_link) - return self.Replace(attachment, path, "attachments", attachment_id, None, options) + return self.Replace(attachment, path, "attachments", attachment_id, None, options, **kwargs) - def DeleteAttachment(self, attachment_link, options=None): + def DeleteAttachment(self, attachment_link, options=None, **kwargs): """Deletes an attachment. :param str attachment_link: @@ -1918,9 +1977,9 @@ def DeleteAttachment(self, attachment_link, options=None): path = base.GetPathFromLink(attachment_link) attachment_id = base.GetResourceIdOrFullNameFromLink(attachment_link) - return self.DeleteResource(path, "attachments", attachment_id, None, options) + return self.DeleteResource(path, "attachments", attachment_id, None, options, **kwargs) - def ReplaceTrigger(self, trigger_link, trigger, options=None): + def ReplaceTrigger(self, trigger_link, trigger, options=None, **kwargs): """Replaces a trigger and returns it. :param str trigger_link: @@ -1947,9 +2006,9 @@ def ReplaceTrigger(self, trigger_link, trigger, options=None): path = base.GetPathFromLink(trigger_link) trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.Replace(trigger, path, "triggers", trigger_id, None, options) + return self.Replace(trigger, path, "triggers", trigger_id, None, options, **kwargs) - def DeleteTrigger(self, trigger_link, options=None): + def DeleteTrigger(self, trigger_link, options=None, **kwargs): """Deletes a trigger. 
:param str trigger_link: @@ -1968,9 +2027,9 @@ def DeleteTrigger(self, trigger_link, options=None): path = base.GetPathFromLink(trigger_link) trigger_id = base.GetResourceIdOrFullNameFromLink(trigger_link) - return self.DeleteResource(path, "triggers", trigger_id, None, options) + return self.DeleteResource(path, "triggers", trigger_id, None, options, **kwargs) - def ReplaceUserDefinedFunction(self, udf_link, udf, options=None): + def ReplaceUserDefinedFunction(self, udf_link, udf, options=None, **kwargs): """Replaces a user defined function and returns it. :param str udf_link: @@ -1997,9 +2056,9 @@ def ReplaceUserDefinedFunction(self, udf_link, udf, options=None): path = base.GetPathFromLink(udf_link) udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.Replace(udf, path, "udfs", udf_id, None, options) + return self.Replace(udf, path, "udfs", udf_id, None, options, **kwargs) - def DeleteUserDefinedFunction(self, udf_link, options=None): + def DeleteUserDefinedFunction(self, udf_link, options=None, **kwargs): """Deletes a user defined function. :param str udf_link: @@ -2018,7 +2077,7 @@ def DeleteUserDefinedFunction(self, udf_link, options=None): path = base.GetPathFromLink(udf_link) udf_id = base.GetResourceIdOrFullNameFromLink(udf_link) - return self.DeleteResource(path, "udfs", udf_id, None, options) + return self.DeleteResource(path, "udfs", udf_id, None, options, **kwargs) def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs): """Executes a store procedure. @@ -2054,7 +2113,7 @@ def ExecuteStoredProcedure(self, sproc_link, params, options=None, **kwargs): result, self.last_response_headers = self.__Post(path, request_params, params, headers, **kwargs) return result - def ReplaceStoredProcedure(self, sproc_link, sproc, options=None): + def ReplaceStoredProcedure(self, sproc_link, sproc, options=None, **kwargs): """Replaces a stored procedure and returns it. :param str sproc_link: @@ -2081,9 +2140,9 @@ def ReplaceStoredProcedure(self, sproc_link, sproc, options=None): path = base.GetPathFromLink(sproc_link) sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.Replace(sproc, path, "sprocs", sproc_id, None, options) + return self.Replace(sproc, path, "sprocs", sproc_id, None, options, **kwargs) - def DeleteStoredProcedure(self, sproc_link, options=None): + def DeleteStoredProcedure(self, sproc_link, options=None, **kwargs): """Deletes a stored procedure. :param str sproc_link: @@ -2102,9 +2161,9 @@ def DeleteStoredProcedure(self, sproc_link, options=None): path = base.GetPathFromLink(sproc_link) sproc_id = base.GetResourceIdOrFullNameFromLink(sproc_link) - return self.DeleteResource(path, "sprocs", sproc_id, None, options) + return self.DeleteResource(path, "sprocs", sproc_id, None, options, **kwargs) - def DeleteConflict(self, conflict_link, options=None): + def DeleteConflict(self, conflict_link, options=None, **kwargs): """Deletes a conflict. :param str conflict_link: @@ -2123,9 +2182,9 @@ def DeleteConflict(self, conflict_link, options=None): path = base.GetPathFromLink(conflict_link) conflict_id = base.GetResourceIdOrFullNameFromLink(conflict_link) - return self.DeleteResource(path, "conflicts", conflict_id, None, options) + return self.DeleteResource(path, "conflicts", conflict_id, None, options, **kwargs) - def ReplaceOffer(self, offer_link, offer): + def ReplaceOffer(self, offer_link, offer, **kwargs): """Replaces an offer and returns it. 
:param str offer_link: @@ -2141,9 +2200,9 @@ def ReplaceOffer(self, offer_link, offer): CosmosClientConnection.__ValidateResource(offer) path = base.GetPathFromLink(offer_link) offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) - return self.Replace(offer, path, "offers", offer_id, None, None) + return self.Replace(offer, path, "offers", offer_id, None, None, **kwargs) - def ReadOffer(self, offer_link): + def ReadOffer(self, offer_link, **kwargs): """Reads an offer. :param str offer_link: @@ -2157,9 +2216,9 @@ def ReadOffer(self, offer_link): """ path = base.GetPathFromLink(offer_link) offer_id = base.GetResourceIdOrFullNameFromLink(offer_link) - return self.Read(path, "offers", offer_id, None, {}) + return self.Read(path, "offers", offer_id, None, {}, **kwargs) - def ReadOffers(self, options=None): + def ReadOffers(self, options=None, **kwargs): """Reads all offers. :param dict options: @@ -2174,9 +2233,9 @@ def ReadOffers(self, options=None): if options is None: options = {} - return self.QueryOffers(None, options) + return self.QueryOffers(None, options, **kwargs) - def QueryOffers(self, query, options=None): + def QueryOffers(self, query, options=None, **kwargs): """Query for all offers. :param (str or dict) query: @@ -2194,11 +2253,15 @@ def QueryOffers(self, query, options=None): def fetch_fn(options): return ( - self.__QueryFeed("/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options), + self.__QueryFeed( + "/offers", "offers", "", lambda r: r["Offers"], lambda _, b: b, query, options, **kwargs + ), self.last_response_headers, ) - return query_iterable.QueryIterable(self, query, options, fetch_fn) + return ItemPaged( + self, query, options, fetch_function=fetch_fn, page_iterator_class=query_iterable.QueryIterable + ) def GetDatabaseAccount(self, url_connection=None, **kwargs): """Gets database account info. @@ -2394,12 +2457,12 @@ def DeleteResource(self, path, typ, id, initial_headers, options=None, **kwargs) return result - def __Get(self, path, request_params, headers, **kwargs): + def __Get(self, path, request_params, req_headers, **kwargs): """Azure Cosmos 'GET' http request. :params str url: :params str path: - :params dict headers: + :params dict req_headers: :return: Tuple of (result, headers). @@ -2407,7 +2470,7 @@ def __Get(self, path, request_params, headers, **kwargs): tuple of (dict, dict) """ - request = self.pipeline_client.get(url=path, headers=headers) + request = self.pipeline_client.get(url=path, headers=req_headers) return synchronized_request.SynchronizedRequest( client=self, request_params=request_params, @@ -2419,13 +2482,13 @@ def __Get(self, path, request_params, headers, **kwargs): **kwargs ) - def __Post(self, path, request_params, body, headers, **kwargs): + def __Post(self, path, request_params, body, req_headers, **kwargs): """Azure Cosmos 'POST' http request. :params str url: :params str path: :params (str, unicode, dict) body: - :params dict headers: + :params dict req_headers: :return: Tuple of (result, headers). 
@@ -2433,7 +2496,7 @@ def __Post(self, path, request_params, body, headers, **kwargs): tuple of (dict, dict) """ - request = self.pipeline_client.post(url=path, headers=headers) + request = self.pipeline_client.post(url=path, headers=req_headers) return synchronized_request.SynchronizedRequest( client=self, request_params=request_params, @@ -2445,13 +2508,13 @@ def __Post(self, path, request_params, body, headers, **kwargs): **kwargs ) - def __Put(self, path, request_params, body, headers, **kwargs): + def __Put(self, path, request_params, body, req_headers, **kwargs): """Azure Cosmos 'PUT' http request. :params str url: :params str path: :params (str, unicode, dict) body: - :params dict headers: + :params dict req_headers: :return: Tuple of (result, headers). @@ -2459,7 +2522,7 @@ def __Put(self, path, request_params, body, headers, **kwargs): tuple of (dict, dict) """ - request = self.pipeline_client.put(url=path, headers=headers) + request = self.pipeline_client.put(url=path, headers=req_headers) return synchronized_request.SynchronizedRequest( client=self, request_params=request_params, @@ -2471,12 +2534,12 @@ def __Put(self, path, request_params, body, headers, **kwargs): **kwargs ) - def __Delete(self, path, request_params, headers, **kwargs): + def __Delete(self, path, request_params, req_headers, **kwargs): """Azure Cosmos 'DELETE' http request. :params str url: :params str path: - :params dict headers: + :params dict req_headers: :return: Tuple of (result, headers). @@ -2484,7 +2547,7 @@ def __Delete(self, path, request_params, headers, **kwargs): tuple of (dict, dict) """ - request = self.pipeline_client.delete(url=path, headers=headers) + request = self.pipeline_client.delete(url=path, headers=req_headers) return synchronized_request.SynchronizedRequest( client=self, request_params=request_params, @@ -2496,7 +2559,7 @@ def __Delete(self, path, request_params, headers, **kwargs): **kwargs ) - def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None): + def QueryFeed(self, path, collection_id, query, options, partition_key_range_id=None, **kwargs): """Query Feed for Document Collection resource. 
:param str path: @@ -2522,6 +2585,7 @@ def QueryFeed(self, path, collection_id, query, options, partition_key_range_id= query, options, partition_key_range_id, + **kwargs ), self.last_response_headers, ) @@ -2598,8 +2662,8 @@ def __GetBodiesFromQueryResult(result): # Query operations will use ReadEndpoint even though it uses POST(for regular query operations) request_params = _request_object.RequestObject(typ, documents._OperationType.SqlQuery) - headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id) - result, self.last_response_headers = self.__Post(path, request_params, query, headers, **kwargs) + req_headers = base.GetHeaders(self, initial_headers, "post", path, id_, typ, options, partition_key_range_id) + result, self.last_response_headers = self.__Post(path, request_params, query, req_headers, **kwargs) if response_hook: response_hook(self.last_response_headers, result) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py index 2e07955ab0ea..6b5e52769193 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_default_retry_policy.py @@ -66,7 +66,7 @@ def needsRetry(self, error_code): def ShouldRetry(self, exception): """Returns true if should retry based on the passed-in exception. - :param (errors.HTTPFailure instance) exception: + :param (errors.CosmosHttpResponseError instance) exception: :rtype: boolean diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py index 90422376d450..2f773de8735a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_endpoint_discovery_retry_policy.py @@ -61,7 +61,7 @@ def __init__(self, connection_policy, global_endpoint_manager, *args): def ShouldRetry(self, exception): # pylint: disable=unused-argument """Returns true if should retry based on the passed-in exception. - :param (errors.HTTPFailure instance) exception: + :param (errors.CosmosHttpResponseError instance) exception: :rtype: boolean diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py index 19d29ab45866..cb554127c276 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/document_producer.py @@ -128,7 +128,8 @@ def compare(self, doc_producer1, doc_producer2): # pylint: disable=no-self-use ) -class _OrderByHelper: +class _OrderByHelper(object): + @staticmethod def getTypeOrd(orderby_item): """Returns the ordinal of the value of the item pair in the dictionary. 
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py index b2c1752b40c3..2c37510a63a4 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_execution_context/execution_dispatcher.py @@ -24,7 +24,7 @@ import json from six.moves import xrange -from azure.cosmos.errors import HTTPFailure +from azure.cosmos.errors import CosmosHttpResponseError from azure.cosmos._execution_context.base_execution_context import _QueryExecutionContextBase from azure.cosmos._execution_context.base_execution_context import _DefaultQueryExecutionContext from azure.cosmos._execution_context.query_execution_info import _PartitionedQueryExecutionInfo @@ -42,7 +42,7 @@ def _is_partitioned_execution_info(e): def _get_partitioned_execution_info(e): - error_msg = json.loads(e._http_error_message) + error_msg = json.loads(e.http_error_message) return _PartitionedQueryExecutionInfo(json.loads(error_msg["additionalErrorInfo"])) @@ -76,7 +76,7 @@ def next(self): """ try: return next(self._execution_context) - except HTTPFailure as e: + except CosmosHttpResponseError as e: if _is_partitioned_execution_info(e): query_execution_info = _get_partitioned_execution_info(e) self._execution_context = self._create_pipelined_execution_context(query_execution_info) @@ -97,7 +97,7 @@ def fetch_next_block(self): """ try: return self._execution_context.fetch_next_block() - except HTTPFailure as e: + except CosmosHttpResponseError as e: if _is_partitioned_execution_info(e): query_execution_info = _get_partitioned_execution_info(e) self._execution_context = self._create_pipelined_execution_context(query_execution_info) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py index b4bb6cda0703..d4dc37ee7533 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_global_endpoint_manager.py @@ -126,13 +126,13 @@ def _GetDatabaseAccount(self): # specified (by creating a locational endpoint) and keeping eating the exception # until we get the database account and return None at the end, if we are not able # to get that info from any endpoints - except errors.HTTPFailure: + except errors.CosmosHttpResponseError: for location_name in self.PreferredLocations: locational_endpoint = _GlobalEndpointManager.GetLocationalEndpoint(self.DefaultEndpoint, location_name) try: database_account = self._GetDatabaseAccountStub(locational_endpoint) return database_account - except errors.HTTPFailure: + except errors.CosmosHttpResponseError: pass return None diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py index 65f8a8fd2a2d..d1cf600be217 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_query_iterable.py @@ -21,18 +21,29 @@ """Iterable query results in the Azure Cosmos database service. """ +from azure.core.paging import PageIterator # type: ignore from azure.cosmos._execution_context import execution_dispatcher from azure.cosmos._execution_context import base_execution_context # pylint: disable=protected-access -class QueryIterable(object): +class QueryIterable(PageIterator): """Represents an iterable object of the query results. 
QueryIterable is a wrapper for query execution context. """ - def __init__(self, client, query, options, fetch_function, collection_link=None): + def __init__( + self, + client, + query, + options, + fetch_function=None, + collection_link=None, + database_link=None, + partition_key=None, + continuation_token=None, + ): """ Instantiates a QueryIterable for non-client side partitioning queries. _ProxyQueryExecutionContext will be used as the internal query execution context @@ -56,41 +67,19 @@ def __init__(self, client, query, options, fetch_function, collection_link=None) self.retry_options = client.connection_policy.RetryOptions self._query = query self._options = options + if continuation_token: + options['continuation'] = continuation_token self._fetch_function = fetch_function self._collection_link = collection_link - self._ex_context = None - - @classmethod - def PartitioningQueryIterable(cls, client, query, options, database_link, partition_key): - """ - Represents a client side partitioning query iterable. - - This constructor instantiates a QueryIterable for - client side partitioning queries, and sets _MultiCollectionQueryExecutionContext - as the internal execution context. - - :param CosmosClient client: - Instance of document client - :param (str or dict) options: - :param dict options: - The request options for the request. - :param str database_link: - Database self link or ID based link - :param str partition_key: - Partition key for the query - """ - # This will call the base constructor(__init__ method above) - - self = cls(client, query, options, None, None) - self._database_link = database_link # pylint: disable=attribute-defined-outside-init - self._partition_key = partition_key # pylint: disable=attribute-defined-outside-init - - return self + self._database_link = database_link + self._partition_key = partition_key + self._ex_context = self._create_execution_context() + super(QueryIterable, self).__init__(self._fetch_next, self._unpack, continuation_token=continuation_token) def _create_execution_context(self): """instantiates the internal query execution context based. """ - if hasattr(self, "_database_link"): + if self._database_link: # client side partitioning query return base_execution_context._MultiCollectionQueryExecutionContext( self._client, self._options, self._database_link, self._query, self._partition_key @@ -99,29 +88,16 @@ def _create_execution_context(self): self._client, self._collection_link, self._query, self._options, self._fetch_function ) - def __iter__(self): - """Makes this class iterable. - """ - return self.Iterator(self) - - class Iterator(object): - def __init__(self, iterable): - self._iterable = iterable - self._finished = False - self._ex_context = iterable._create_execution_context() - - def __iter__(self): - # Always returns self - return self + def _unpack(self, block): + continuation = None + if self._client.last_response_headers: + continuation = self._client.last_response_headers.get("x-ms-continuation") or \ + self._client.last_response_headers.get('etag') + if block: + self._did_a_call_already = False + return continuation, block - def __next__(self): - return next(self._ex_context) - - # Also support Python 3.x iteration - def next(self): - return self.__next__() - - def fetch_next_block(self): + def _fetch_next(self, *args): # pylint: disable=unused-argument """Returns a block of results with respecting retry policy. This method only exists for backward compatibility reasons. 
(Because QueryIterable @@ -132,9 +108,7 @@ def fetch_next_block(self): :rtype: list """ - - if self._ex_context is None: - # initiates execution context for the first time - self._ex_context = self._create_execution_context() - - return self._ex_context.fetch_next_block() + block = self._ex_context.fetch_next_block() + if not block: + raise StopIteration + return block diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py index 8e027e0fcc2e..e21454ec7792 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_resource_throttle_retry_policy.py @@ -36,7 +36,7 @@ def __init__(self, max_retry_attempt_count, fixed_retry_interval_in_milliseconds def ShouldRetry(self, exception): """Returns true if should retry based on the passed-in exception. - :param (errors.HTTPFailure instance) exception: + :param (errors.CosmosHttpResponseError instance) exception: :rtype: boolean diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py index e787857de9e8..df575cb27d36 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_retry_utility.py @@ -80,7 +80,7 @@ def Execute(client, global_endpoint_manager, function, *args, **kwargs): ] = resourceThrottle_retry_policy.cummulative_wait_time_in_milliseconds return result - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: retry_policy = None if e.status_code == StatusCodes.FORBIDDEN and e.sub_status == SubStatusCodes.WRITE_FORBIDDEN: retry_policy = endpointDiscovery_retry_policy diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py index 6396de351536..fc9e640b0899 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_runtime_constants.py @@ -23,7 +23,7 @@ """ -class MediaTypes: +class MediaTypes(object): """Constants of media types. http://www.iana.org/assignments/media-types/media-types.xhtml diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py index c80a53c0c5f1..dd1a573eaa4a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_session.py @@ -29,7 +29,7 @@ from . import _base from . import http_constants from ._vector_session_token import VectorSessionToken -from .errors import HTTPFailure +from .errors import CosmosHttpResponseError class SessionContainer(object): @@ -196,15 +196,15 @@ def parse_session_token(response_headers): id_ = tokens[0] sessionToken = VectorSessionToken.create(tokens[1]) if sessionToken is None: - raise HTTPFailure( - http_constants.StatusCodes.INTERNAL_SERVER_ERROR, - "Could not parse the received session token: %s" % tokens[1], + raise CosmosHttpResponseError( + status_code=http_constants.StatusCodes.INTERNAL_SERVER_ERROR, + message="Could not parse the received session token: %s" % tokens[1], ) id_to_sessionlsn[id_] = sessionToken return id_to_sessionlsn -class Session: +class Session(object): """ State of a Azure Cosmos session. 
This session object can be shared across clients within the same process diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py index 9f2c14be6a1f..01ae7778a7f4 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_session_retry_policy.py @@ -62,7 +62,7 @@ def __init__(self, endpoint_discovery_enable, global_endpoint_manager, *args): def ShouldRetry(self, _exception): """Returns true if should retry based on the passed-in exception. - :param (errors.HTTPFailure instance) exception: + :param (errors.CosmosHttpResponseError instance) exception: :rtype: boolean diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py index 618541feb6b0..7f1b900303ca 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_synchronized_request.py @@ -24,8 +24,9 @@ import json -from six.moves.urllib.parse import urlparse, urlencode +from six.moves.urllib.parse import urlparse import six +from azure.core.exceptions import DecodeError # type: ignore from . import documents from . import errors @@ -86,6 +87,8 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin tuple of (dict, dict) """ + # pylint: disable=protected-access + is_media = request.url.find("media") > -1 is_media_stream = is_media and connection_policy.MediaReadMode == documents.MediaReadMode.Streamed @@ -125,7 +128,7 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin connection_timeout=connection_timeout, connection_verify=kwargs.pop("connection_verify", ca_certs), connection_cert=kwargs.pop("connection_cert", cert_files), - + **kwargs ) else: response = pipeline_client._pipeline.run( @@ -133,7 +136,8 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin stream=is_media_stream, connection_timeout=connection_timeout, # If SSL is disabled, verify = false - connection_verify=kwargs.pop("connection_verify", is_ssl_enabled) + connection_verify=kwargs.pop("connection_verify", is_ssl_enabled), + **kwargs ) response = response.http_response @@ -149,8 +153,14 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin # python 3 compatible: convert data from byte to unicode string data = data.decode("utf-8") + if response.status_code == 404: + raise errors.CosmosResourceNotFoundError(message=data, response=response) + if response.status_code == 409: + raise errors.CosmosResourceExistsError(message=data, response=response) + if response.status_code == 412: + raise errors.CosmosAccessConditionFailedError(message=data, response=response) if response.status_code >= 400: - raise errors.HTTPFailure(response.status_code, data, headers) + raise errors.CosmosHttpResponseError(message=data, response=response) result = None if is_media: @@ -159,8 +169,11 @@ def _Request(global_endpoint_manager, request_params, connection_policy, pipelin if data: try: result = json.loads(data) - except: - raise errors.JSONParseFailure(data) + except Exception as e: + raise DecodeError( + message="Failed to decode JSON data: {}".format(e), + response=response, + error=e) return (result, headers) @@ -180,7 +193,7 @@ def SynchronizedRequest( :param object client: Document client instance :param dict request_params: - :param _GlobalEndpointManager global_endpoint_manager: + :param 
_GlobalEndpointManager global_endpoint_manager: :param documents.ConnectionPolicy connection_policy: :param azure.core.PipelineClient pipeline_client: PipelineClient to process the request. diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py index 4e24a938d2d7..f44e3a906bda 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_utils.py @@ -24,17 +24,13 @@ import platform import re -from . import http_constants +from .version import VERSION def get_user_agent(): - os_name = safe_user_agent_header(platform.system()) - os_version = safe_user_agent_header(platform.release()) + os_name = safe_user_agent_header(platform.platform()) python_version = safe_user_agent_header(platform.python_version()) - - user_agent = "{}/{} Python/{} {}/{}".format( - os_name, os_version, python_version, http_constants.Versions.SDKName, http_constants.Versions.SDKVersion - ) + user_agent = "azsdk-python-cosmos/{} Python/{} ({})".format(VERSION, python_version, os_name) return user_agent diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py index 1c6832f31d3c..675f0801632d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_vector_session_token.py @@ -120,11 +120,10 @@ def merge(self, other): raise ValueError("Invalid Session Token (should not be None)") if self.version == other.version and len(self.local_lsn_by_region) != len(other.local_lsn_by_region): - raise errors.CosmosError( - Exception( - "Status Code: %s. Compared session tokens '%s' and '%s' have unexpected regions." - % (StatusCodes.INTERNAL_SERVER_ERROR, self.session_token, other.session_token) - ) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.INTERNAL_SERVER_ERROR, + message=("Compared session tokens '%s' and '%s' have unexpected regions." + % (self.session_token, other.session_token)) ) if self.version < other.version: @@ -148,11 +147,10 @@ def merge(self, other): if local_lsn2 is not None: highest_local_lsn_by_region[region_id] = max(local_lsn1, local_lsn2) elif self.version == other.version: - raise errors.CosmosError( - Exception( - "Status Code: %s. Compared session tokens '%s' and '%s' have unexpected regions." - % (StatusCodes.INTERNAL_SERVER_ERROR, self.session_token, other.session_token) - ) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.INTERNAL_SERVER_ERROR, + message=("Compared session tokens '%s' and '%s' have unexpected regions." + % (self.session_token, other.session_token)) ) else: highest_local_lsn_by_region[region_id] = local_lsn1 diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index db929854e529..bb94879c7923 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -22,26 +22,29 @@ """Create, read, update and delete items in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, Callable, Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import import six -from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from .errors import HTTPFailure +from ._base import build_options +from .errors import CosmosResourceNotFoundError from .http_constants import StatusCodes from .offer import Offer -from .scripts import Scripts -from ._query_iterable import QueryIterable +from .scripts import ScriptsProxy from .partition_key import NonePartitionKeyValue -__all__ = ("Container",) +__all__ = ("ContainerProxy",) # pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class Container: - """ An Azure Cosmos DB container. +class ContainerProxy(object): + """ + An interface to interact with a specific DB Container. + This class should not be instantiated directly, use :func:`DatabaseProxy.get_container_client` method. A container in an Azure Cosmos DB SQL API database is a collection of documents, each of which represented as an Item. @@ -62,28 +65,30 @@ def __init__(self, client_connection, database_link, id, properties=None): # py self._properties = properties self.container_link = u"{}/colls/{}".format(database_link, self.id) self._is_system_key = None - self._scripts = None + self._scripts = None # type: Optional[ScriptsProxy] def _get_properties(self): # type: () -> Dict[str, Any] if self._properties is None: - self.read() + self._properties = self.read() return self._properties @property def is_system_key(self): + # type: () -> bool if self._is_system_key is None: properties = self._get_properties() self._is_system_key = ( properties["partitionKey"]["systemKey"] if "systemKey" in properties["partitionKey"] else False ) - return self._is_system_key + return cast('bool', self._is_system_key) @property def scripts(self): + # type: () -> ScriptsProxy if self._scripts is None: - self._scripts = Scripts(self.client_connection, self.container_link, self.is_system_key) - return self._scripts + self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key) + return cast('ScriptsProxy', self._scripts) def _get_document_link(self, item_or_link): # type: (Union[Dict[str, Any], str]) -> str @@ -97,20 +102,22 @@ def _get_conflict_link(self, conflict_or_link): return u"{}/conflicts/{}".format(self.container_link, conflict_or_link) return conflict_or_link["_self"] + def _set_partition_key(self, partition_key): + if partition_key == NonePartitionKeyValue: + return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) + return partition_key + @distributed_trace def read( self, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - populate_partition_key_range_statistics=None, - populate_quota_info=None, - request_options=None, - response_hook=None, - **kwargs + populate_query_metrics=None, # type: Optional[bool] + populate_partition_key_range_statistics=None, # type: Optional[bool] + populate_quota_info=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (str, Dict[str, str], bool, bool, bool, Dict[str, Any], Optional[Callable]) -> Container - """ Read the container properties + # type: (...) 
-> Dict[str, Any] + """ + Read the container properties :param session_token: Token for use with Session consistency. :param initial_headers: Initial headers to be sent as part of the request. @@ -120,17 +127,13 @@ def read( :param populate_quota_info: Enable returning collection storage quota information in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raise `HTTPFailure`: Raised if the container couldn't be retrieved. This includes + :raise `CosmosHttpResponseError`: Raised if the container couldn't be retrieved. This includes if the container does not exist. - :returns: :class:`Container` instance representing the retrieved container. - + :returns: Dict representing the retrieved container. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if populate_partition_key_range_statistics is not None: @@ -139,29 +142,27 @@ def read( request_options["populateQuotaInfo"] = populate_quota_info collection_link = self.container_link - self._properties = self.client_connection.ReadContainer(collection_link, options=request_options, **kwargs) + self._properties = self.client_connection.ReadContainer( + collection_link, options=request_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) - return self._properties + return cast('Dict[str, Any]', self._properties) @distributed_trace def read_item( self, item, # type: Union[str, Dict[str, Any]] partition_key, # type: Any - session_token=None, # type: str - initial_headers=None, # type: # type: Dict[str, str] - populate_query_metrics=None, # type: bool - post_trigger_include=None, # type: str - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> Dict[str, str] """ - Get the item identified by `id`. + Get the item identified by `item`. :param item: The ID (name) or dict representing item to retrieve. :param partition_key: Partition key for the item to retrieve. @@ -172,7 +173,8 @@ def read_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: Dict representing the item to be retrieved. - :raise `HTTPFailure`: If the given item couldn't be retrieved. + :raise `CosmosHttpResponseError`: If the given item couldn't be retrieved. + :rtype: dict[str, Any] .. 
literalinclude:: ../../examples/examples.py :start-after: [START update_item] @@ -184,15 +186,11 @@ def read_item( """ doc_link = self._get_document_link(item) + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) - if not request_options: - request_options = {} # type: Dict[str, Any] if partition_key: request_options["partitionKey"] = self._set_partition_key(partition_key) - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if post_trigger_include: @@ -206,16 +204,13 @@ def read_item( @distributed_trace def read_all_items( self, - max_item_count=None, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - feed_options=None, - response_hook=None, - **kwargs + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List all items in the container. + # type: (...) -> Iterable[Dict[str, Any]] + """ + List all items in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. :param session_token: Token for use with Session consistency. @@ -224,15 +219,12 @@ def read_all_items( :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of items (dicts). + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -249,30 +241,29 @@ def read_all_items( @distributed_trace def query_items_change_feed( self, - partition_key_range_id=None, - is_start_from_beginning=False, - continuation=None, - max_item_count=None, - feed_options=None, - response_hook=None, - **kwargs + partition_key_range_id=None, # type: Optional[str] + is_start_from_beginning=False, # type: bool + continuation=None, # type: Optional[str] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any ): - """ Get a sorted list of items that were changed, in the order in which they were modified. + # type: (...) -> Iterable[Dict[str, Any]] + """ + Get a sorted list of items that were changed, in the order in which they were modified. :param partition_key_range_id: ChangeFeed requests can be executed against specific partition key ranges. - This is used to process the change feed in parallel across multiple consumers. + This is used to process the change feed in parallel across multiple consumers. :param is_start_from_beginning: Get whether change feed should start from - beginning (true) or from current (false). - By default it's start from current (false). + beginning (true) or from current (false). By default it's start from current (false). :param continuation: e_tag value to be used as continuation for reading change feed. 
:param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of items (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if partition_key_range_id is not None: feed_options["partitionKeyRangeId"] = partition_key_range_id if is_start_from_beginning is not None: @@ -296,27 +287,28 @@ def query_items_change_feed( def query_items( self, query, # type: str - parameters=None, # type: List - partition_key=None, # type: Any - enable_cross_partition_query=None, # type: bool - max_item_count=None, # type: int - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - enable_scan_in_query=None, # type: bool - populate_query_metrics=None, # type: bool - feed_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + parameters=None, # type: Optional[List[str]] + partition_key=None, # type: Optional[Any] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + enable_scan_in_query=None, # type: Optional[bool] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (...) -> QueryIterable - """Return all results matching the given `query`. + # type: (...) -> Iterable[Dict[str, Any]] + """ + Return all results matching the given `query`. + + You can use any value for the container name in the FROM clause, but typically the container name is used. + In the examples below, the container name is "products," and is aliased as "p" for easier referencing + in the WHERE clause. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param partition_key: Specifies the partition key value for the item. :param enable_cross_partition_query: Allows sending of more than one request to execute the query in the Azure Cosmos DB service. - More than one request is necessary if the query is not scoped to single partition key value. + More than one request is necessary if the query is not scoped to single partition key value. :param max_item_count: Max number of items to be returned in the enumeration operation. :param session_token: Token for use with Session consistency. :param initial_headers: Initial headers to be sent as part of the request. @@ -326,10 +318,7 @@ def query_items( :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of items (dicts). - - You can use any value for the container name in the FROM clause, but typically the container name is used. - In the examples below, the container name is "products," and is aliased as "p" for easier referencing - in the WHERE clause. + :rtype: Iterable[dict[str, Any]] .. 
literalinclude:: ../../examples/examples.py :start-after: [START query_items] @@ -348,16 +337,12 @@ def query_items( :name: query_items_param """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if enable_cross_partition_query is not None: feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics if partition_key is not None: @@ -369,7 +354,7 @@ def query_items( response_hook.clear() items = self.client_connection.QueryItems( - database_or_Container_link=self.container_link, + database_or_container_link=self.container_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, partition_key=partition_key, @@ -385,18 +370,14 @@ def replace_item( self, item, # type: Union[str, Dict[str, Any]] body, # type: Dict[str, Any] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - pre_trigger_include=None, # type: str - post_trigger_include=None, # type: str - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> Dict[str, str] - """ Replaces the specified item if it exists in the container. + """ + Replaces the specified item if it exists in the container. :param item: The ID (name) or dict representing item to be replaced. :param body: A dict-like object representing the item to replace. @@ -409,19 +390,13 @@ def replace_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the item after replace went through. - :raise `HTTPFailure`: If the replace failed or the item with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the item with given id does not exist. 
+ :rtype: dict[str, Any] """ item_link = self._get_document_link(item) - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include: @@ -440,18 +415,15 @@ def replace_item( def upsert_item( self, body, # type: Dict[str, Any] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - pre_trigger_include=None, # type: str - post_trigger_include=None, # type: str - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> Dict[str, str] - """ Insert or update the specified item. + """ + Insert or update the specified item. + If the item already exists in the container, it is replaced. If it does not, it is inserted. :param body: A dict-like object representing the item to update or insert. :param session_token: Token for use with Session consistency. @@ -463,20 +435,12 @@ def upsert_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the upserted item. - :raise `HTTPFailure`: If the given item could not be upserted. - - If the item already exists in the container, it is replaced. If it does not, it is inserted. - + :raise `CosmosHttpResponseError`: If the given item could not be upserted. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) request_options["disableIdGeneration"] = True - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include: @@ -485,7 +449,7 @@ def upsert_item( request_options["postTriggerInclude"] = post_trigger_include result = self.client_connection.UpsertItem( - database_or_Container_link=self.container_link, document=body, **kwargs) + database_or_container_link=self.container_link, document=body, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers, result) return result @@ -494,19 +458,16 @@ def upsert_item( def create_item( self, body, # type: Dict[str, Any] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - pre_trigger_include=None, # type: str - post_trigger_include=None, # type: str - indexing_directive=None, # type: Any - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + indexing_directive=None, # type: Optional[Any] + **kwargs # type: Any ): # type: (...) -> Dict[str, str] - """ Create an item in the container. + """ + Create an item in the container. + To update or replace an existing item, use the :func:`ContainerProxy.upsert_item` method. :param body: A dict-like object representing the item to create. :param session_token: Token for use with Session consistency. @@ -519,21 +480,13 @@ def create_item( :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the new item. - :raises `HTTPFailure`: If item with the given ID already exists. - - To update or replace an existing item, use the :func:`Container.upsert_item` method. - + :raises `CosmosHttpResponseError`: If item with the given ID already exists. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) request_options["disableAutomaticIdGeneration"] = True - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition if populate_query_metrics: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include: @@ -544,7 +497,7 @@ def create_item( request_options["indexingDirective"] = indexing_directive result = self.client_connection.CreateItem( - database_or_Container_link=self.container_link, document=body, options=request_options, **kwargs + database_or_container_link=self.container_link, document=body, options=request_options, **kwargs ) if response_hook: response_hook(self.client_connection.last_response_headers, result) @@ -555,18 +508,14 @@ def delete_item( self, item, # type: Union[Dict[str, Any], str] partition_key, # type: Any - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - pre_trigger_include=None, # type: str - post_trigger_include=None, # type: str - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + populate_query_metrics=None, # type: Optional[bool] + pre_trigger_include=None, # type: Optional[str] + post_trigger_include=None, # type: Optional[str] + **kwargs # type: Any ): # type: (...) -> None - """ Delete the specified item from the container. + """ + Delete the specified item from the container. :param item: The ID (name) or dict representing item to be deleted. :param partition_key: Specifies the partition key value for the item. @@ -578,20 +527,14 @@ def delete_item( :param post_trigger_include: trigger id to be used as post operation trigger. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `HTTPFailure`: The item wasn't deleted successfully. If the item does not + :raises `CosmosHttpResponseError`: The item wasn't deleted successfully. If the item does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if partition_key: request_options["partitionKey"] = self._set_partition_key(partition_key) - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if pre_trigger_include: @@ -605,15 +548,17 @@ def delete_item( response_hook(self.client_connection.last_response_headers, result) @distributed_trace - def read_offer(self, response_hook=None, **kwargs): - # type: (Optional[Callable]) -> Offer - """ Read the Offer object for this container. + def read_offer(self, **kwargs): + # type: (Any) -> Offer + """ + Read the Offer object for this container. :param response_hook: a callable invoked with the response metadata :returns: Offer for the container. 
- :raise HTTPFailure: If no offer exists for the container or if the offer could not be retrieved. - + :raise CosmosHttpResponseError: If no offer exists for the container or if the offer could not be retrieved. + :rtype: ~azure.cosmos.offer.Offer """ + response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() link = properties["_self"] query_spec = { @@ -622,7 +567,9 @@ def read_offer(self, response_hook=None, **kwargs): } offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: - raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for container " + self.container_link) + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) if response_hook: response_hook(self.client_connection.last_response_headers, offers) @@ -630,16 +577,18 @@ def read_offer(self, response_hook=None, **kwargs): return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) @distributed_trace - def replace_throughput(self, throughput, response_hook=None, **kwargs): - # type: (int, Optional[Callable]) -> Offer - """ Replace the container's throughput + def replace_throughput(self, throughput, **kwargs): + # type: (int, Any) -> Offer + """ + Replace the container's throughput :param throughput: The throughput to be set (an integer). :param response_hook: a callable invoked with the response metadata :returns: Offer for the container, updated with new throughput. - :raise HTTPFailure: If no offer exists for the container or if the offer could not be updated. - + :raise CosmosHttpResponseError: If no offer exists for the container or if the offer could not be updated. + :rtype: ~azure.cosmos.offer.Offer """ + response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() link = properties["_self"] query_spec = { @@ -648,7 +597,9 @@ def replace_throughput(self, throughput, response_hook=None, **kwargs): } offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: - raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for container " + self.container_link) + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for container " + self.container_link) new_offer = offers[0].copy() new_offer["content"]["offerThroughput"] = throughput data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs) @@ -659,18 +610,19 @@ def replace_throughput(self, throughput, response_hook=None, **kwargs): return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data) @distributed_trace - def read_all_conflicts(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs): - # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List all conflicts in the container. + def list_conflicts(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all conflicts in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of conflicts (dicts). 
- + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -684,32 +636,31 @@ def read_all_conflicts(self, max_item_count=None, feed_options=None, response_ho @distributed_trace def query_conflicts( self, - query, - parameters=None, - enable_cross_partition_query=None, - partition_key=None, - max_item_count=None, - feed_options=None, - response_hook=None, - **kwargs + query, # type: str + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + partition_key=None, # type: Optional[Any] + max_item_count=None, # type: Optional[int] + **kwargs # type: Any ): - # type: (str, List, bool, Any, int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """Return all conflicts matching the given `query`. + # type: (...) -> Iterable[Dict[str, Any]] + """ + Return all conflicts matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param partition_key: Specifies the partition key value for the item. :param enable_cross_partition_query: Allows sending of more than one request to execute the query in the Azure Cosmos DB service. - More than one request is necessary if the query is not scoped to single partition key value. + More than one request is necessary if the query is not scoped to single partition key value. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of conflicts (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count if enable_cross_partition_query is not None: @@ -728,20 +679,21 @@ def query_conflicts( return result @distributed_trace - def get_conflict(self, conflict, partition_key, request_options=None, response_hook=None, **kwargs): - # type: (Union[str, Dict[str, Any]], Any, Dict[str, Any], Optional[Callable]) -> Dict[str, str] - """ Get the conflict identified by `id`. + def get_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str] + """ + Get the conflict identified by `conflict`. :param conflict: The ID (name) or dict representing the conflict to retrieve. :param partition_key: Partition key for the conflict to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the retrieved conflict. - :raise `HTTPFailure`: If the given conflict couldn't be retrieved. - + :raise `CosmosHttpResponseError`: If the given conflict couldn't be retrieved. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if partition_key: request_options["partitionKey"] = self._set_partition_key(partition_key) @@ -753,20 +705,21 @@ def get_conflict(self, conflict, partition_key, request_options=None, response_h return result @distributed_trace - def delete_conflict(self, conflict, partition_key, request_options=None, response_hook=None, **kwargs): - # type: (Union[str, Dict[str, Any]], Any, Dict[str, Any], Optional[Callable]) -> None - """ Delete the specified conflict from the container. + def delete_conflict(self, conflict, partition_key, **kwargs): + # type: (Union[str, Dict[str, Any]], Any, Any) -> None + """ + Delete the specified conflict from the container. :param conflict: The ID (name) or dict representing the conflict to be deleted. :param partition_key: Partition key for the conflict to delete. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `HTTPFailure`: The conflict wasn't deleted successfully. If the conflict + :raises `CosmosHttpResponseError`: The conflict wasn't deleted successfully. If the conflict does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if partition_key: request_options["partitionKey"] = self._set_partition_key(partition_key) @@ -775,8 +728,3 @@ def delete_conflict(self, conflict, partition_key, request_options=None, respons ) if response_hook: response_hook(self.client_connection.last_response_headers, result) - - def _set_partition_key(self, partition_key): - if partition_key == NonePartitionKeyValue: - return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) - return partition_key diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 9b0aa1836831..9387163ce416 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -22,91 +22,205 @@ """Create, read, and delete databases in the Azure Cosmos DB SQL API service. 
""" -from typing import Any, Callable, Dict, Mapping, Optional, Union, cast +from typing import Any, Dict, Mapping, Optional, Union, cast, Iterable, List import six -from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from .database import Database +from ._base import build_options +from .database import DatabaseProxy from .documents import ConnectionPolicy, DatabaseAccount -from ._query_iterable import QueryIterable __all__ = ("CosmosClient",) -class CosmosClient: +def _parse_connection_str(conn_str, credential): + # type: (str, Optional[Any]) -> Dict[str, str] + conn_str = conn_str.rstrip(";") + conn_settings = dict( # type: ignore # pylint: disable=consider-using-dict-comprehension + s.split("=", 1) for s in conn_str.split(";") + ) + if 'AccountEndpoint' not in conn_settings: + raise ValueError("Connection string missing setting 'AccountEndpoint'.") + if not credential and 'AccountKey' not in conn_settings: + raise ValueError("Connection string missing setting 'AccountKey'.") + return conn_settings + + +def _build_auth(credential): + # type: (Any) -> Dict[str, Any] + auth = {} + if isinstance(credential, six.string_types): + auth['masterKey'] = credential + elif isinstance(credential, dict): + if any(k for k in credential.keys() if k in ['masterKey', 'resourceTokens', 'permissionFeed']): + return credential # Backwards compatible + auth['resourceTokens'] = credential # type: ignore + elif hasattr(credential, '__iter__'): + auth['permissionFeed'] = credential + else: + raise TypeError( + "Unrecognized credential type. Please supply the master key as str, " + "or a dictionary or resource tokens, or a list of permissions.") + return auth + + +def _build_connection_policy(kwargs): + # type: (Dict[str, Any]) -> ConnectionPolicy + # pylint: disable=protected-access + policy = kwargs.pop('connection_policy', None) or ConnectionPolicy() + + # Connection config + policy.RequestTimeout = kwargs.pop('request_timeout', None) or \ + kwargs.pop('connection_timeout', None) or \ + policy.RequestTimeout + policy.MediaRequestTimeout = kwargs.pop('media_request_timeout', None) or policy.MediaRequestTimeout + policy.ConnectionMode = kwargs.pop('connection_mode', None) or policy.ConnectionMode + policy.MediaReadMode = kwargs.pop('media_read_mode', None) or policy.MediaReadMode + policy.ProxyConfiguration = kwargs.pop('proxy_config', None) or policy.ProxyConfiguration + policy.EnableEndpointDiscovery = kwargs.pop('enable_endpoint_discovery', None) or policy.EnableEndpointDiscovery + policy.PreferredLocations = kwargs.pop('preferred_locations', None) or policy.PreferredLocations + policy.UseMultipleWriteLocations = kwargs.pop('multiple_write_locations', None) or \ + policy.UseMultipleWriteLocations + + # SSL config + verify = kwargs.pop('connection_verify', None) + policy.DisableSSLVerification = not bool(verify if verify is not None else True) + ssl = kwargs.pop('ssl_config', None) or policy.SSLConfiguration + if ssl: + ssl.SSLCertFile = kwargs.pop('connection_cert', None) or ssl.SSLCertFile + ssl.SSLCaCerts = verify or ssl.SSLCaCerts + policy.SSLConfiguration = ssl + + # Retry config + retry = kwargs.pop('retry_options', None) or policy.RetryOptions + retry._max_retry_attempt_count = kwargs.pop('retry_total', None) or retry._max_retry_attempt_count + retry._fixed_retry_interval_in_milliseconds = kwargs.pop('retry_fixed_interval', None) or \ + 
retry._fixed_retry_interval_in_milliseconds + retry._max_wait_time_in_seconds = kwargs.pop('retry_backoff_max', None) or retry._max_wait_time_in_seconds + policy.RetryOptions = retry + + return policy + + +class CosmosClient(object): """ Provides a client-side logical representation of an Azure Cosmos DB account. Use this client to configure and execute requests to the Azure Cosmos DB service. + + :param str url: The URL of the Cosmos DB account. + :param credential: + Can be the account key, or a dictionary of resource tokens. + :type credential: str or dict(str, str) + :param str consistency_level: + Consistency level to use for the session. The default value is "Session". + + **Keyword arguments:** + + *request_timeout* - The HTTP request timeout in seconds. + *media_request_timeout* - The media request timeout in seconds. + *connection_mode* - The connection mode for the client - currently only supports 'Gateway'. + *media_read_mode* - The mode for use with downloading attachment content - default value is `Buffered`. + *proxy_config* - Instance of ~azure.cosmos.documents.ProxyConfiguration + *ssl_config* - Instance of ~azure.cosmos.documents.SSLConfiguration + *connection_verify* - Whether to verify the connection, default value is True. + *connection_cert* - An alternative certificate to verify the connection. + *retry_total* - Maximum retry attempts. + *retry_backoff_max* - Maximum retry wait time in seconds. + *retry_fixed_interval* - Fixed retry interval in milliseconds. + *enable_endpoint_discovery* - Enable endpoint discovery for geo-replicated database accounts. Default is True. + *preferred_locations* - The preferred locations for geo-replicated database accounts. + When `enable_endpoint_discovery` is true and `preferred_locations` is non-empty, + the client will use this list to evaluate the final location, taking into consideration + the order specified in `preferred_locations` list. The locations in this list are specified + as the names of the azure Cosmos locations like, 'West US', 'East US', 'Central India' + and so on. + *connection_policy* - An instance of ~azure.cosmos.documents.ConnectionPolicy + + .. literalinclude:: ../../examples/examples.py + :start-after: [START create_client] + :end-before: [END create_client] + :language: python + :dedent: 0 + :caption: Create a new instance of the Cosmos DB client: + :name: create_client """ - def __init__( - self, url, auth, consistency_level="Session", connection_policy=None - ): # pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs,line-too-long - # type: (str, Dict[str, str], str, ConnectionPolicy) -> None - """ Instantiate a new CosmosClient. - - :param url: The URL of the Cosmos DB account. - :param auth: - Contains 'masterKey' or 'resourceTokens', where - auth['masterKey'] is the default authorization key to use to - create the client, and auth['resourceTokens'] is the alternative - authorization key. - :param consistency_level: Consistency level to use for the session. - :param connection_policy: Connection policy to use for the session. + def __init__(self, url, credential, consistency_level="Session", **kwargs): + # type: (str, Any, str, Any) -> None + """ Instantiate a new CosmosClient.""" + auth = _build_auth(credential) + connection_policy = _build_connection_policy(kwargs) + self.client_connection = CosmosClientConnection( + url, auth=auth, consistency_level=consistency_level, connection_policy=connection_policy, **kwargs + ) - .. 
literalinclude:: ../../examples/examples.py
-            :start-after: [START create_client]
-            :end-before: [END create_client]
-            :language: python
-            :dedent: 0
-            :caption: Create a new instance of the Cosmos DB client:
-            :name: create_client
+    def __enter__(self):
+        self.client_connection.pipeline_client.__enter__()
+        return self
+
+    def __exit__(self, *args):
+        return self.client_connection.pipeline_client.__exit__(*args)
+
+    @classmethod
+    def from_connection_string(cls, conn_str, credential=None, consistency_level="Session", **kwargs):
+        # type: (str, Optional[Any], str, Any) -> CosmosClient
         """
-        self.client_connection = CosmosClientConnection(
-            url, auth, consistency_level=consistency_level, connection_policy=connection_policy
+        Create a CosmosClient from a connection string.
+
+        This can be retrieved from the Azure portal. For a full list of optional keyword
+        arguments, see the CosmosClient constructor.
+
+        :param str conn_str: The connection string.
+        :param credential: Alternative credentials to use instead of the key provided in the
+            connection string.
+        :type credential: str or dict(str, str)
+        :param str consistency_level: Consistency level to use for the session. The default value is "Session".
+        """
+        settings = _parse_connection_str(conn_str, credential)
+        return cls(
+            url=settings['AccountEndpoint'],
+            credential=credential or settings['AccountKey'],
+            consistency_level=consistency_level,
+            **kwargs
         )

     @staticmethod
     def _get_database_link(database_or_id):
-        # type: (str) -> str
+        # type: (Union[DatabaseProxy, str, Dict[str, str]]) -> str
         if isinstance(database_or_id, six.string_types):
             return "dbs/{}".format(database_or_id)
         try:
-            return cast("Database", database_or_id).database_link
+            return cast("DatabaseProxy", database_or_id).database_link
         except AttributeError:
             pass
         database_id = cast("Dict[str, str]", database_or_id)["id"]
         return "dbs/{}".format(database_id)

     @distributed_trace
-    def create_database(
+    def create_database(  # pylint: disable=redefined-builtin
         self,
-        id,  # pylint: disable=redefined-builtin
-        session_token=None,
-        initial_headers=None,
-        access_condition=None,
-        populate_query_metrics=None,
-        offer_throughput=None,
-        request_options=None,
-        response_hook=None,
-        **kwargs
+        id,  # type: str
+        populate_query_metrics=None,  # type: Optional[bool]
+        offer_throughput=None,  # type: Optional[int]
+        **kwargs  # type: Any
     ):
-        # type: (str, str, Dict[str, str], Dict[str, str], bool, int, Dict[str, Any], Optional[Callable]) -> Database
-        """Create a new database with the given ID (name).
+        # type: (...) -> DatabaseProxy
+        """
+        Create a new database with the given ID (name).

         :param id: ID (name) of the database to create.
-        :param session_token: Token for use with Session consistency.
-        :param initial_headers: Initial headers to be sent as part of the request.
-        :param access_condition: Conditions Associated with the request.
-        :param populate_query_metrics: Enable returning query metrics in response headers.
-        :param offer_throughput: The provisioned throughput for this offer.
-        :param request_options: Dictionary of additional properties to be used for the request.
-        :param response_hook: a callable invoked with the response metadata
-        :returns: A :class:`Database` instance representing the new database.
-        :raises `HTTPFailure`: If database with the given ID already exists.
+        :param str session_token: Token for use with Session consistency.
+        :param dict(str, str) initial_headers: Initial headers to be sent as part of the request.
+ :param dict(str, str) access_condition: Conditions Associated with the request. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param int offer_throughput: The provisioned throughput for this offer. + :param dict(str, Any) request_options: Dictionary of additional properties to be used for the request. + :param Callable response_hook: a callable invoked with the response metadata + :returns: A DatabaseProxy instance representing the new database. + :rtype: ~azure.cosmos.database.DatabaseProxy + :raises `CosmosResourceExistsError`: If database with the given ID already exists. .. literalinclude:: ../../examples/examples.py :start-after: [START create_database] @@ -118,14 +232,8 @@ def create_database( """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if offer_throughput is not None: @@ -134,59 +242,52 @@ def create_database( result = self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) - return Database(self.client_connection, id=result["id"], properties=result) + return DatabaseProxy(self.client_connection, id=result["id"], properties=result) def get_database_client(self, database): - # type: (Union[str, Database, Dict[str, Any]]) -> Database + # type: (Union[str, DatabaseProxy, Dict[str, Any]]) -> DatabaseProxy """ Retrieve an existing database with the ID (name) `id`. - :param database: The ID (name), dict representing the properties or :class:`Database` + :param database: The ID (name), dict representing the properties or `DatabaseProxy` instance of the database to read. - :returns: A :class:`Database` instance representing the retrieved database. - + :type database: str or dict(str, str) or ~azure.cosmos.database.DatabaseProxy + :returns: A `DatabaseProxy` instance representing the retrieved database. + :rtype: ~azure.cosmos.database.DatabaseProxy """ - if isinstance(database, Database): + if isinstance(database, DatabaseProxy): id_value = database.id elif isinstance(database, Mapping): id_value = database["id"] else: id_value = database - return Database(self.client_connection, id_value) + return DatabaseProxy(self.client_connection, id_value) @distributed_trace - def read_all_databases( + def list_databases( self, - max_item_count=None, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - feed_options=None, - response_hook=None, - **kwargs + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable + # type: (...) -> Iterable[Dict[str, Any]] """ List the databases in a Cosmos DB SQL database account. - :param max_item_count: Max number of items to be returned in the enumeration operation. - :param session_token: Token for use with Session consistency. - :param initial_headers: Initial headers to be sent as part of the request. 
- :param populate_query_metrics: Enable returning query metrics in response headers. - :param feed_options: Dictionary of additional properties to be used for the request. - :param response_hook: a callable invoked with the response metadata + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :param str session_token: Token for use with Session consistency. + :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param dict[str, str] feed_options: Dictionary of additional properties to be used for the request. + :param Callable response_hook: a callable invoked with the response metadata :returns: An Iterable of database properties (dicts). - + :rtype: Iterable[dict[str, str]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -198,45 +299,36 @@ def read_all_databases( @distributed_trace def query_databases( self, - query=None, # type: str - parameters=None, # type: List[str] - enable_cross_partition_query=None, # type: bool - max_item_count=None, # type: int - session_token=None, # type: str - initial_headers=None, # type: Dict[str,str] - populate_query_metrics=None, # type: bool - feed_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[str]] + enable_cross_partition_query=None, # type: Optional[bool] + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (...) -> QueryIterable - + # type: (...) -> Iterable[Dict[str, Any]] """ Query the databases in a Cosmos DB SQL database account. - :param query: The Azure Cosmos DB SQL query to execute. - :param parameters: Optional array of parameters to the query. Ignored if no query is provided. - :param enable_cross_partition_query: Allow scan on the queries which couldn't be + :param str query: The Azure Cosmos DB SQL query to execute. + :param list[str] parameters: Optional array of parameters to the query. Ignored if no query is provided. + :param bool enable_cross_partition_query: Allow scan on the queries which couldn't be served as indexing was opted out on the requested paths. - :param max_item_count: Max number of items to be returned in the enumeration operation. - :param session_token: Token for use with Session consistency. - :param initial_headers: Initial headers to be sent as part of the request. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param feed_options: Dictionary of additional properties to be used for the request. - :param response_hook: a callable invoked with the response metadata + :param int max_item_count: Max number of items to be returned in the enumeration operation. + :param str session_token: Token for use with Session consistency. + :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. 
+ :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param dict[str, Any] feed_options: Dictionary of additional properties to be used for the request. + :param Callable response_hook: a callable invoked with the response metadata :returns: An Iterable of database properties (dicts). - + :rtype: Iterable[dict[str, str]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if enable_cross_partition_query is not None: feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -246,7 +338,7 @@ def query_databases( # (just returning a generator did not initiate the first network call, so # the headers were misleading) # This needs to change for "real" implementation - query = query if parameters is None else dict(query=query, parameters=parameters) + query = query if parameters is None else dict(query=query, parameters=parameters) # type: ignore result = self.client_connection.QueryDatabases(query=query, options=feed_options, **kwargs) else: result = self.client_connection.ReadDatabases(options=feed_options, **kwargs) @@ -257,38 +349,28 @@ def query_databases( @distributed_trace def delete_database( self, - database, # type: Union[str, Database, Dict[str, Any]] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + database, # type: Union[str, DatabaseProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): # type: (...) -> None """ Delete the database with the given ID (name). - :param database: The ID (name), dict representing the properties or :class:`Database` + :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` instance of the database to delete. - :param session_token: Token for use with Session consistency. - :param initial_headers: Initial headers to be sent as part of the request. - :param access_condition: Conditions Associated with the request. - :param populate_query_metrics: Enable returning query metrics in response headers. - :param request_options: Dictionary of additional properties to be used for the request. - :param response_hook: a callable invoked with the response metadata - :raise HTTPFailure: If the database couldn't be deleted. - + :type database: str or dict(str, str) or ~azure.cosmos.database.DatabaseProxy + :param str session_token: Token for use with Session consistency. + :param dict[str, str] initial_headers: Initial headers to be sent as part of the request. + :param dict[str, str] access_condition: Conditions Associated with the request. + :param bool populate_query_metrics: Enable returning query metrics in response headers. + :param dict[str, str] request_options: Dictionary of additional properties to be used for the request. + :param Callable response_hook: a callable invoked with the response metadata + :raise CosmosHttpResponseError: If the database couldn't be deleted. 
+ :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -298,15 +380,16 @@ def delete_database( response_hook(self.client_connection.last_response_headers) @distributed_trace - def get_database_account(self, response_hook=None, **kwargs): - # type: (Optional[Callable]) -> DatabaseAccount + def get_database_account(self, **kwargs): + # type: (Any) -> DatabaseAccount """ Retrieve the database account information. - :param response_hook: a callable invoked with the response metadata - :returns: A :class:`DatabaseAccount` instance representing the Cosmos DB Database Account. - + :param Callable response_hook: a callable invoked with the response metadata + :returns: A `DatabaseAccount` instance representing the Cosmos DB Database Account. + :rtype: ~azure.cosmos.documents.DatabaseAccount """ + response_hook = kwargs.pop('response_hook', None) result = self.client_connection.GetDatabaseAccount(**kwargs) if response_hook: response_hook(self.client_connection.last_response_headers) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index 0df8589b70c2..a1ee733918b5 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -22,26 +22,29 @@ """Create, read, update and delete containers in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Mapping, Union, cast +from typing import Any, List, Dict, Mapping, Union, cast, Iterable, Optional import six -from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from .container import Container +from ._base import build_options +from .container import ContainerProxy from .offer import Offer from .http_constants import StatusCodes -from .errors import HTTPFailure -from .user import User -from ._query_iterable import QueryIterable +from .errors import CosmosResourceNotFoundError +from .user import UserProxy -__all__ = ("Database",) +__all__ = ("DatabaseProxy",) # pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class Database(object): - """ Represents an Azure Cosmos DB SQL API database. +class DatabaseProxy(object): + """ + An interface to interact with a specific database. + This class should not be instantiated directly, use :func:`CosmosClient.get_database_client` method. A database contains one or more containers, each of which can contain items, stored procedures, triggers, and user-defined functions. 
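For orientation, a minimal sketch of how the renamed proxy types introduced in this change are intended to be used together. The endpoint, key, and database name below are placeholders, not values from this diff; only `CosmosClient`, `get_database_client`, and `list_containers` are taken from the new API surface:

```Python
from azure.cosmos import CosmosClient

# Placeholder account values -- substitute a real endpoint and master key.
url = "https://<my-account>.documents.azure.com:443/"
key = "<my-account-key>"

client = CosmosClient(url, credential=key)

# A DatabaseProxy is obtained from the client rather than constructed directly.
database = client.get_database_client("testDatabase")

# The renamed list_* operations return iterables of property dicts.
for container_properties in database.list_containers():
    print(container_properties["id"])
```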
@@ -75,25 +78,25 @@ def __init__(self, client_connection, id, properties=None): # pylint: disable=r @staticmethod def _get_container_id(container_or_id): - # type: (Union[str, Container, Dict[str, Any]]) -> str + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str if isinstance(container_or_id, six.string_types): return container_or_id try: - return cast("Container", container_or_id).id + return cast("ContainerProxy", container_or_id).id except AttributeError: pass return cast("Dict[str, str]", container_or_id)["id"] def _get_container_link(self, container_or_id): - # type: (Union[str, Container, Dict[str, Any]]) -> str + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> str return u"{}/colls/{}".format(self.database_link, self._get_container_id(container_or_id)) def _get_user_link(self, user_or_id): - # type: (Union[User, str, Dict[str, Any]]) -> str + # type: (Union[UserProxy, str, Dict[str, Any]]) -> str if isinstance(user_or_id, six.string_types): return u"{}/users/{}".format(self.database_link, user_or_id) try: - return cast("User", user_or_id).user_link + return cast("UserProxy", user_or_id).user_link except AttributeError: pass return u"{}/users/{}".format(self.database_link, cast("Dict[str, str]", user_or_id)["id"]) @@ -101,77 +104,61 @@ def _get_user_link(self, user_or_id): def _get_properties(self): # type: () -> Dict[str, Any] if self._properties is None: - self.read() + self._properties = self.read() return self._properties @distributed_trace - def read( - self, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - request_options=None, - response_hook=None, - **kwargs - ): - # type: (str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> Dict[str, Any] + def read(self, populate_query_metrics=None, **kwargs): + # type: (Optional[bool], Any) -> Dict[str, Any] """ - Read the database properties + Read the database properties. - :param database: The ID (name), dict representing the properties or :class:`Database` + :param database: The ID (name), dict representing the properties or :class:`DatabaseProxy` instance of the database to read. :param session_token: Token for use with Session consistency. :param initial_headers: Initial headers to be sent as part of the request. - :param populate_query_metrics: Enable returning query metrics in response headers. + :param bool populate_query_metrics: Enable returning query metrics in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: Dict[Str, Any] - :raise `HTTPFailure`: If the given database couldn't be retrieved. - + :rtype: Dict[Str, Any] + :raise `CosmosHttpResponseError`: If the given database couldn't be retrieved. 
""" # TODO this helper function should be extracted from CosmosClient from .cosmos_client import CosmosClient database_link = CosmosClient._get_database_link(self) - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics - self._properties = self.client_connection.ReadDatabase(database_link, options=request_options, **kwargs) + self._properties = self.client_connection.ReadDatabase( + database_link, options=request_options, **kwargs + ) if response_hook: response_hook(self.client_connection.last_response_headers, self._properties) - return self._properties + return cast('Dict[str, Any]', self._properties) @distributed_trace def create_container( self, id, # type: str # pylint: disable=redefined-builtin - partition_key, # type: PartitionKey - indexing_policy=None, # type: Dict[str, Any] - default_ttl=None, # type: int - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - offer_throughput=None, # type: int - unique_key_policy=None, # type: Dict[str, Any] - conflict_resolution_policy=None, # type: Dict[str, Any] - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + offer_throughput=None, # type: Optional[int] + unique_key_policy=None, # type: Optional[Dict[str, Any]] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + **kwargs # type: Any ): - # type: (...) -> Container + # type: (...) -> ContainerProxy """ Create a new container with the given ID (name). - If a container with the given ID already exists, an HTTPFailure with status_code 409 is raised. + If a container with the given ID already exists, a CosmosResourceExistsError is raised. :param id: ID (name) of container to create. :param partition_key: The partition key to use for the container. @@ -186,9 +173,9 @@ def create_container( :param conflict_resolution_policy: The conflict resolution policy to apply to the container. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`Container` instance representing the new container. - :raise HTTPFailure: The container creation failed. - + :returns: A `ContainerProxy` instance representing the new container. + :raise CosmosHttpResponseError: The container creation failed. + :rtype: ~azure.cosmos.container.ContainerProxy .. 
literalinclude:: ../../examples/examples.py :start-after: [START create_container] @@ -219,14 +206,8 @@ def create_container( if conflict_resolution_policy: definition["conflictResolutionPolicy"] = conflict_resolution_policy - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics if offer_throughput is not None: @@ -239,25 +220,21 @@ def create_container( if response_hook: response_hook(self.client_connection.last_response_headers, data) - return Container(self.client_connection, self.database_link, data["id"], properties=data) + return ContainerProxy(self.client_connection, self.database_link, data["id"], properties=data) @distributed_trace def delete_container( self, - container, # type: Union[str, Container, Dict[str, Any]] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): # type: (...) -> None - """ Delete the container + """ + Delete the container :param container: The ID (name) of the container to delete. You can either - pass in the ID of the container to delete, a :class:`Container` instance or + pass in the ID of the container to delete, a :class:`ContainerProxy` instance or a dict representing the properties of the container. :param session_token: Token for use with Session consistency. :param initial_headers: Initial headers to be sent as part of the request. @@ -265,17 +242,11 @@ def delete_container( :param populate_query_metrics: Enable returning query metrics in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raise HTTPFailure: If the container couldn't be deleted. - + :raise CosmosHttpResponseError: If the container couldn't be deleted. + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -285,11 +256,13 @@ def delete_container( response_hook(self.client_connection.last_response_headers, result) def get_container_client(self, container): - # type: (Union[str, Container, Dict[str, Any]]) -> Container - """ Get the specified `Container`, or a container with specified ID (name). + # type: (Union[str, ContainerProxy, Dict[str, Any]]) -> ContainerProxy + """ + Get the specified `ContainerProxy`, or a container with specified ID (name). 
- :param container: The ID (name) of the container, a :class:`Container` instance, + :param container: The ID (name) of the container, a :class:`ContainerProxy` instance, or a dict representing the properties of the container to be retrieved. + :rtype: ~azure.cosmos.container.ContainerProxy .. literalinclude:: ../../examples/examples.py :start-after: [START get_container] @@ -300,28 +273,20 @@ def get_container_client(self, container): :name: get_container """ - if isinstance(container, Container): + if isinstance(container, ContainerProxy): id_value = container.id elif isinstance(container, Mapping): id_value = container["id"] else: id_value = container - return Container(self.client_connection, self.database_link, id_value) + return ContainerProxy(self.client_connection, self.database_link, id_value) @distributed_trace - def read_all_containers( - self, - max_item_count=None, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - feed_options=None, - response_hook=None, - **kwargs - ): - # type: (int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List the containers in the database. + def list_containers(self, max_item_count=None, populate_query_metrics=None, **kwargs): + # type: (Optional[int], Optional[bool], Any) -> Iterable[Dict[str, Any]] + """ + List the containers in the database. :param max_item_count: Max number of items to be returned in the enumeration operation. :param session_token: Token for use with Session consistency. @@ -330,6 +295,7 @@ def read_all_containers( :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of container properties (dicts). + :rtype: Iterable[dict[str, Any]] .. literalinclude:: ../../examples/examples.py :start-after: [START list_containers] @@ -340,14 +306,10 @@ def read_all_containers( :name: list_containers """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -361,18 +323,15 @@ def read_all_containers( @distributed_trace def query_containers( self, - query=None, - parameters=None, - max_item_count=None, - session_token=None, - initial_headers=None, - populate_query_metrics=None, - feed_options=None, - response_hook=None, - **kwargs + query=None, # type: Optional[str] + parameters=None, # type: Optional[List[str]] + max_item_count=None, # type: Optional[int] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (str, List, int, str, Dict[str, str], bool, Dict[str, Any], Optional[Callable]) -> QueryIterable - """List properties for containers in the current database + # type: (...) -> Iterable[Dict[str, Any]] + """ + List properties for containers in the current database. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. @@ -383,16 +342,12 @@ def query_containers( :param feed_options: Dictionary of additional properties to be used for the request. 
:param response_hook: a callable invoked with the response metadata :returns: An Iterable of container properties (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - if session_token: - feed_options["sessionToken"] = session_token - if initial_headers: - feed_options["initialHeaders"] = initial_headers if populate_query_metrics is not None: feed_options["populateQueryMetrics"] = populate_query_metrics @@ -409,29 +364,25 @@ def query_containers( @distributed_trace def replace_container( self, - container, # type: Union[str, Container, Dict[str, Any]] - partition_key, # type: PartitionKey - indexing_policy=None, # type: Dict[str, Any] - default_ttl=None, # type: int - conflict_resolution_policy=None, # type: Dict[str, Any] - session_token=None, # type: str - initial_headers=None, # type: Dict[str, str] - access_condition=None, # type: Dict[str, str] - populate_query_metrics=None, # type: bool - request_options=None, # type: Dict[str, Any] - response_hook=None, # type: Optional[Callable] - **kwargs + container, # type: Union[str, ContainerProxy, Dict[str, Any]] + partition_key, # type: Any + indexing_policy=None, # type: Optional[Dict[str, Any]] + default_ttl=None, # type: Optional[int] + conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] + populate_query_metrics=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (...) -> Container - """ Reset the properties of the container. Property changes are persisted immediately. - + # type: (...) -> ContainerProxy + """ + Reset the properties of the container. Property changes are persisted immediately. Any properties not specified will be reset to their default values. :param container: The ID (name), dict representing the properties or - :class:`Container` instance of the container to be replaced. + :class:`ContainerProxy` instance of the container to be replaced. :param partition_key: The partition key to use for the container. :param indexing_policy: The indexing policy to apply to the container. - :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. + :param default_ttl: Default time to live (TTL) for items in the container. + If unspecified, items do not expire. :param conflict_resolution_policy: The conflict resolution policy to apply to the container. :param session_token: Token for use with Session consistency. :param access_condition: Conditions Associated with the request. @@ -439,9 +390,10 @@ def replace_container( :param populate_query_metrics: Enable returning query metrics in response headers. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raise `HTTPFailure`: Raised if the container couldn't be replaced. This includes + :raise `CosmosHttpResponseError`: Raised if the container couldn't be replaced. This includes if the container with given id does not exist. - :returns: :class:`Container` instance representing the container after replace completed. + :returns: A `ContainerProxy` instance representing the container after replace completed. + :rtype: ~azure.cosmos.container.ContainerProxy .. 
literalinclude:: ../../examples/examples.py :start-after: [START reset_container_properties] @@ -452,14 +404,8 @@ def replace_container( :name: reset_container_properties """ - if not request_options: - request_options = {} # type: Dict[str, Any] - if session_token: - request_options["sessionToken"] = session_token - if initial_headers: - request_options["initialHeaders"] = initial_headers - if access_condition: - request_options["accessCondition"] = access_condition + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if populate_query_metrics is not None: request_options["populateQueryMetrics"] = populate_query_metrics @@ -484,23 +430,24 @@ def replace_container( if response_hook: response_hook(self.client_connection.last_response_headers, container_properties) - return Container( + return ContainerProxy( self.client_connection, self.database_link, container_properties["id"], properties=container_properties ) @distributed_trace - def read_all_users(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs): - # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """ List all users in the container. + def list_users(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all users in the container. :param max_item_count: Max number of users to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of user properties (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -512,9 +459,10 @@ def read_all_users(self, max_item_count=None, feed_options=None, response_hook=N return result @distributed_trace - def query_users(self, query, parameters=None, max_item_count=None, feed_options=None, response_hook=None, **kwargs): - # type: (str, List, int, Dict[str, Any], Optional[Callable]) -> QueryIterable - """Return all users matching the given `query`. + def query_users(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all users matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. @@ -522,10 +470,10 @@ def query_users(self, query, parameters=None, max_item_count=None, feed_options= :param feed_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: An Iterable of user properties (dicts). - + :rtype: Iterable[str, Any] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -540,38 +488,39 @@ def query_users(self, query, parameters=None, max_item_count=None, feed_options= return result def get_user_client(self, user): - # type: (Union[str, User, Dict[str, Any]]) -> User + # type: (Union[str, UserProxy, Dict[str, Any]]) -> UserProxy """ - Get the user identified by `id`. 
+        Get the user identified by `user`.

-        :param user: The ID (name), dict representing the properties or :class:`User`
+        :param user: The ID (name), dict representing the properties or :class:`UserProxy`
            instance of the user to be retrieved.
-        :returns: A :class:`User` instance representing the retrieved user.
-        :raise `HTTPFailure`: If the given user couldn't be retrieved.
-
+        :returns: A `UserProxy` instance representing the retrieved user.
+        :raise `CosmosHttpResponseError`: If the given user couldn't be retrieved.
+        :rtype: ~azure.cosmos.user.UserProxy
         """
-        if isinstance(user, User):
+        if isinstance(user, UserProxy):
             id_value = user.id
         elif isinstance(user, Mapping):
             id_value = user["id"]
         else:
             id_value = user
-        return User(client_connection=self.client_connection, id=id_value, database_link=self.database_link)
+        return UserProxy(client_connection=self.client_connection, id=id_value, database_link=self.database_link)

     @distributed_trace
-    def create_user(self, body, request_options=None, response_hook=None, **kwargs):
-        # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User
-        """ Create a user in the container.
+    def create_user(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> UserProxy
+        """
+        Create a user in the container.
+        To update or replace an existing user, use the :func:`DatabaseProxy.upsert_user` method.

         :param body: A dict-like object with an `id` key and value representing the user to be created.
             The user ID must be unique within the database, and consist of no more than 255 characters.
         :param request_options: Dictionary of additional properties to be used for the request.
         :param response_hook: a callable invoked with the response metadata
-        :returns: A :class:`User` instance representing the new user.
-        :raise `HTTPFailure`: If the given user couldn't be created.
-
-        To update or replace an existing user, use the :func:`Container.upsert_user` method.
+        :returns: A `UserProxy` instance representing the new user.
+        :raise `CosmosHttpResponseError`: If the given user couldn't be created.
+        :rtype: ~azure.cosmos.user.UserProxy

         .. literalinclude:: ../../examples/examples.py
             :start-after: [START create_user]
@@ -582,8 +531,8 @@ def create_user(self, body, request_options=None, response_hook=None, **kwargs):
             :name: create_user

         """
-        if not request_options:
-            request_options = {}  # type: Dict[str, Any]
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)

         user = self.client_connection.CreateUser(
            database_link=self.database_link, user=body, options=request_options, **kwargs)

@@ -591,26 +540,26 @@ def create_user(self, body, request_options=None, response_hook=None, **kwargs):
         if response_hook:
             response_hook(self.client_connection.last_response_headers, user)

-        return User(
+        return UserProxy(
             client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user
         )

     @distributed_trace
-    def upsert_user(self, body, request_options=None, response_hook=None, **kwargs):
-        # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User
-        """ Insert or update the specified user.
+    def upsert_user(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> UserProxy
+        """
+        Insert or update the specified user.
+        If the user already exists in the container, it is replaced. If it does not, it is inserted.

         :param body: A dict-like object representing the user to update or insert.
         :param request_options: Dictionary of additional properties to be used for the request.
:param response_hook: a callable invoked with the response metadata - :returns: A :class:`User` instance representing the upserted user. - :raise `HTTPFailure`: If the given user could not be upserted. - - If the user already exists in the container, it is replaced. If it does not, it is inserted. - + :returns: A `UserProxy` instance representing the upserted user. + :raise `CosmosHttpResponseError`: If the given user could not be upserted. + :rtype: ~azure.cosmos.user.UserProxy """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) user = self.client_connection.UpsertUser( database_link=self.database_link, user=body, options=request_options, **kwargs @@ -619,53 +568,63 @@ def upsert_user(self, body, request_options=None, response_hook=None, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, user) - return User( + return UserProxy( client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user ) @distributed_trace - def replace_user(self, user, body, request_options=None, response_hook=None, **kwargs): - # type: (Union[str, User, Dict[str, Any]], Dict[str, Any], Dict[str, Any], Optional[Callable]) -> User - """ Replaces the specified user if it exists in the container. + def replace_user( + self, + user, # type: Union[str, UserProxy, Dict[str, Any]] + body, # type: Dict[str, Any] + **kwargs # type: Any + ): + # type: (...) -> UserProxy + """ + Replaces the specified user if it exists in the container. - :param user: The ID (name), dict representing the properties or :class:`User` + :param user: The ID (name), dict representing the properties or :class:`UserProxy` instance of the user to be replaced. :param body: A dict-like object representing the user to replace. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :returns: A :class:`User` instance representing the user after replace went through. - :raise `HTTPFailure`: If the replace failed or the user with given id does not exist. - + :returns: A `UserProxy` instance representing the user after replace went through. + :raise `CosmosHttpResponseError`: If the replace failed or the user with given id does not exist. + :rtype: ~azure.cosmos.user.UserProxy """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) - user = self.client_connection.ReplaceUser( + replaced_user = self.client_connection.ReplaceUser( user_link=self._get_user_link(user), user=body, options=request_options, **kwargs - ) + ) # type: Dict[str, str] if response_hook: - response_hook(self.client_connection.last_response_headers, user) + response_hook(self.client_connection.last_response_headers, replaced_user) - return User( - client_connection=self.client_connection, id=user["id"], database_link=self.database_link, properties=user + return UserProxy( + client_connection=self.client_connection, + id=replaced_user["id"], + database_link=self.database_link, + properties=replaced_user ) @distributed_trace - def delete_user(self, user, request_options=None, response_hook=None, **kwargs): - # type: (Union[str, User, Dict[str, Any]], Dict[str, Any], Optional[Callable]) -> None - """ Delete the specified user from the container. 
+ def delete_user(self, user, **kwargs): + # type: (Union[str, UserProxy, Dict[str, Any]], Any) -> None + """ + Delete the specified user from the container. - :param user: The ID (name), dict representing the properties or :class:`User` + :param user: The ID (name), dict representing the properties or :class:`UserProxy` instance of the user to be deleted. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `HTTPFailure`: The user wasn't deleted successfully. If the user does not + :raises `CosmosHttpResponseError`: The user wasn't deleted successfully. If the user does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) result = self.client_connection.DeleteUser( user_link=self._get_user_link(user), options=request_options, **kwargs @@ -674,15 +633,17 @@ def delete_user(self, user, request_options=None, response_hook=None, **kwargs): response_hook(self.client_connection.last_response_headers, result) @distributed_trace - def read_offer(self, response_hook=None, **kwargs): - # type: (Optional[Callable]) -> Offer - """ Read the Offer object for this database. + def read_offer(self, **kwargs): + # type: (Any) -> Offer + """ + Read the Offer object for this database. :param response_hook: a callable invoked with the response metadata :returns: Offer for the database. - :raise HTTPFailure: If no offer exists for the database or if the offer could not be retrieved. - + :raise CosmosHttpResponseError: If no offer exists for the database or if the offer could not be retrieved. + :rtype: ~azure.cosmos.offer.Offer """ + response_hook = kwargs.pop('response_hook', None) properties = self._get_properties() link = properties["_self"] query_spec = { @@ -691,7 +652,9 @@ def read_offer(self, response_hook=None, **kwargs): } offers = list(self.client_connection.QueryOffers(query_spec, **kwargs)) if not offers: - raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for database " + self.database_link) + raise CosmosResourceNotFoundError( + status_code=StatusCodes.NOT_FOUND, + message="Could not find Offer for database " + self.database_link) if response_hook: response_hook(self.client_connection.last_response_headers, offers) @@ -699,16 +662,18 @@ def read_offer(self, response_hook=None, **kwargs): return Offer(offer_throughput=offers[0]["content"]["offerThroughput"], properties=offers[0]) @distributed_trace - def replace_throughput(self, throughput, response_hook=None, **kwargs): - # type: (int, Optional[Callable]) -> Offer - """ Replace the database level throughput. + def replace_throughput(self, throughput, **kwargs): + # type: (Optional[int], Any) -> Offer + """ + Replace the database level throughput. :param throughput: The throughput to be set (an integer). :param response_hook: a callable invoked with the response metadata :returns: Offer for the database, updated with new throughput. - :raise HTTPFailure: If no offer exists for the database or if the offer could not be updated. - + :raise CosmosHttpResponseError: If no offer exists for the database or if the offer could not be updated. 
+        :rtype: ~azure.cosmos.offer.Offer
         """
+        response_hook = kwargs.pop('response_hook', None)
         properties = self._get_properties()
         link = properties["_self"]
         query_spec = {
@@ -717,7 +682,9 @@ def replace_throughput(self, throughput, response_hook=None, **kwargs):
         }
         offers = list(self.client_connection.QueryOffers(query_spec))
         if not offers:
-            raise HTTPFailure(StatusCodes.NOT_FOUND, "Could not find Offer for collection " + self.database_link)
+            raise CosmosResourceNotFoundError(
+                status_code=StatusCodes.NOT_FOUND,
+                message="Could not find Offer for database " + self.database_link)
         new_offer = offers[0].copy()
         new_offer["content"]["offerThroughput"] = throughput
         data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=offers[0], **kwargs)
diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py b/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py
index 181136eb93f3..5fcb514959c1 100644
--- a/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py
+++ b/sdk/cosmos/azure-cosmos/azure/cosmos/errors.py
@@ -21,45 +21,45 @@
 """PyCosmos Exceptions in the Azure Cosmos database service.
 """
+from azure.core.exceptions import (  # type: ignore  # pylint: disable=unused-import
+    AzureError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError
+)
 from . import http_constants


-class CosmosError(Exception):
-    """Base class for all Azure Cosmos errors.
-    """
+class CosmosHttpResponseError(HttpResponseError):
+    """Raised when an HTTP request to the Azure Cosmos service has failed."""

-
-class HTTPFailure(CosmosError):
-    """Raised when a HTTP request to the Azure Cosmos has failed.
-    """
-
-    def __init__(self, status_code, message="", headers=None):
+    def __init__(self, status_code=None, message=None, response=None, **kwargs):
         """
-        :param int status_code:
-        :param str message:
-
+        :param int status_code: HTTP response code.
+        :param str message: Error message.
         """
-        if headers is None:
-            headers = {}
-
-        self.status_code = status_code
-        self.headers = headers
+        self.headers = response.headers if response else {}
         self.sub_status = None
-        self._http_error_message = message
+        self.http_error_message = message
+        status = status_code or (int(response.status_code) if response else 0)
+
         if http_constants.HttpHeaders.SubStatus in self.headers:
             self.sub_status = int(self.headers[http_constants.HttpHeaders.SubStatus])
-            CosmosError.__init__(
-                self, "Status code: %d Sub-status: %d\n%s" % (self.status_code, self.sub_status, message)
-            )
+            formatted_message = "Status code: %d Sub-status: %d\n%s" % (status, self.sub_status, str(message))
         else:
-            CosmosError.__init__(self, "Status code: %d\n%s" % (self.status_code, message))
+            formatted_message = "Status code: %d\n%s" % (status, str(message))
+
+        super(CosmosHttpResponseError, self).__init__(message=formatted_message, response=response, **kwargs)
+        self.status_code = status
+
+
+class CosmosResourceNotFoundError(ResourceNotFoundError, CosmosHttpResponseError):
+    """An error response with status code 404."""


-class JSONParseFailure(CosmosError):
-    """Raised when fails to parse JSON message.
-    """
+class CosmosResourceExistsError(ResourceExistsError, CosmosHttpResponseError):
+    """An error response with status code 409."""


-class UnexpectedDataType(CosmosError):
-    """Raised when unexpected data type is provided as parameter.
- """ +class CosmosAccessConditionFailedError(CosmosHttpResponseError): + """An error response with status code 412.""" diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py index b6ea40ee7f45..da327a708548 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py @@ -23,7 +23,7 @@ """ -class HttpMethods: +class HttpMethods(object): """Constants of http methods. """ @@ -35,7 +35,7 @@ class HttpMethods: Options = "OPTIONS" -class HttpHeaders: +class HttpHeaders(object): """Constants of http headers. """ @@ -197,14 +197,14 @@ class HttpHeaders: AllowTentativeWrites = "x-ms-cosmos-allow-tentative-writes" -class HttpHeaderPreferenceTokens: +class HttpHeaderPreferenceTokens(object): """Constants of http header preference tokens. """ PreferUnfilteredQueryResponse = "PreferUnfilteredQueryResponse" -class HttpStatusDescriptions: +class HttpStatusDescriptions(object): """Constants of http status descriptions. """ @@ -234,7 +234,7 @@ class HttpStatusDescriptions: RetryWith = "Retry the request" -class QueryStrings: +class QueryStrings(object): """Constants of query strings. """ @@ -252,23 +252,22 @@ class QueryStrings: Generic = "generic" -class CookieHeaders: +class CookieHeaders(object): """Constants of cookie headers. """ SessionToken = "x-ms-session-token" -class Versions: +class Versions(object): """Constants of versions. """ CurrentVersion = "2018-12-31" SDKName = "azure-cosmos" - SDKVersion = "4.0.0a1" -class Delimiters: +class Delimiters(object): """Constants of delimiters. """ @@ -276,7 +275,7 @@ class Delimiters: ClientContinuationFormat = "{0}!!{1}" -class HttpListenerErrorCodes: +class HttpListenerErrorCodes(object): """Constants of http listener error codes. """ @@ -284,14 +283,14 @@ class HttpListenerErrorCodes: ERROR_CONNECTION_INVALID = 1229 -class HttpContextProperties: +class HttpContextProperties(object): """Constants of http context properties. """ SubscriptionId = "SubscriptionId" -class _ErrorCodes: +class _ErrorCodes(object): """Windows Socket Error Codes """ @@ -316,7 +315,7 @@ class _ErrorCodes: LinuxConnectionReset = 131 -class StatusCodes: +class StatusCodes(object): """HTTP status codes returned by the REST operations """ @@ -350,7 +349,7 @@ class StatusCodes: OPERATION_CANCELLED = 1201 -class SubStatusCodes: +class SubStatusCodes(object): """Sub status codes returned by the REST operations specifying the details of the operation """ @@ -385,7 +384,7 @@ class SubStatusCodes: INSUFFICIENT_BINDABLE_PARTITIONS = 1007 -class ResourceType: +class ResourceType(object): """Types of resources in Azure Cosmos """ diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py index 4e7b240d1c8f..c4087542f003 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py @@ -21,6 +21,7 @@ """Represents an offer in the Azure Cosmos DB SQL API service. """ +from typing import Dict, Any class Offer(dict): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/permission.py b/sdk/cosmos/azure-cosmos/azure/cosmos/permission.py index 44a5a7ee2fc3..3432e741de8c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/permission.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/permission.py @@ -21,13 +21,14 @@ """Represents a Permission object in the Azure Cosmos DB SQL API service. 
""" +from typing import Dict, Any, Union from .documents import PermissionMode -class Permission: +class Permission(object): def __init__(self, id, user_link, permission_mode, resource_link, properties): # pylint: disable=redefined-builtin - # type: (str, str, PermissionMode, str, Dict[str, Any]) -> None + # type: (str, str, Union[str, PermissionMode], str, Dict[str, Any]) -> None self.id = id self.user_link = user_link self.permission_mode = permission_mode diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py b/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py index a0e889e8b8ae..d7208f918c4c 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/scripts.py @@ -22,24 +22,30 @@ """Create, read, update and delete and execute scripts in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union +from typing import Any, List, Dict, Union, Iterable, Optional import six from azure.cosmos._cosmos_client_connection import CosmosClientConnection +from ._base import build_options from .partition_key import NonePartitionKeyValue -from ._query_iterable import QueryIterable # pylint: disable=protected-access +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class ScriptType: +class ScriptType(object): StoredProcedure = "sprocs" Trigger = "triggers" UserDefinedFunction = "udfs" -class Scripts: +class ScriptsProxy(object): + """ + An interface to interact with stored procedures. + This class should not be instantiated directly, use :func:`ContainerProxy.scripts` attribute. + """ + def __init__(self, client_connection, container_link, is_system_key): # type: (CosmosClientConnection, str, bool) -> None self.client_connection = client_connection @@ -52,35 +58,37 @@ def _get_resource_link(self, script_or_id, typ): return u"{}/{}/{}".format(self.container_link, typ, script_or_id) return script_or_id["_self"] - def list_stored_procedures(self, max_item_count=None, feed_options=None): - # type: (int, Dict[str, Any]) -> QueryIterable - """ List all stored procedures in the container. + def list_stored_procedures(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all stored procedures in the container. - :param max_item_count: Max number of items to be returned in the enumeration operation. + :param int max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of stored procedures (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - return self.client_connection.ReadStoredProcedures(collection_link=self.container_link, options=feed_options) + return self.client_connection.ReadStoredProcedures( + collection_link=self.container_link, options=feed_options, **kwargs + ) - def query_stored_procedures(self, query, parameters=None, max_item_count=None, feed_options=None): - # type: (str, List, int, Dict[str, Any]) -> QueryIterable - """Return all stored procedures matching the given `query`. 
+ def query_stored_procedures(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all stored procedures matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of stored procedures (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count @@ -88,99 +96,106 @@ def query_stored_procedures(self, query, parameters=None, max_item_count=None, f collection_link=self.container_link, query=query if parameters is None else dict(query=query, parameters=parameters), options=feed_options, + **kwargs ) - def get_stored_procedure(self, sproc, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> Dict[str, Any] + def get_stored_procedure(self, sproc, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any] """ Get the stored procedure identified by `id`. :param sproc: The ID (name) or dict representing stored procedure to retrieve. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the retrieved stored procedure. - :raise `HTTPFailure`: If the given stored procedure couldn't be retrieved. - + :raise `CosmosHttpResponseError`: If the given stored procedure couldn't be retrieved. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReadStoredProcedure( - sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs ) - def create_stored_procedure(self, body, request_options=None): - # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - """ Create a stored procedure in the container. + def create_stored_procedure(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Dict[str, Any] + """ + Create a stored procedure in the container. + To replace an existing sproc, use the :func:`Container.scripts.replace_stored_procedure` method. :param body: A dict-like object representing the sproc to create. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the new stored procedure. - :raise `HTTPFailure`: If the given stored procedure couldn't be created. - - To replace an existing sproc, use the :func:`Container.scripts.replace_stored_procedure` method. - + :raise `CosmosHttpResponseError`: If the given stored procedure couldn't be created. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.CreateStoredProcedure( - collection_link=self.container_link, sproc=body, options=request_options + collection_link=self.container_link, sproc=body, options=request_options, **kwargs ) - def replace_stored_procedure(self, sproc, body, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any], Dict[str, Any]) -> Dict[str, Any] - """ Replaces the specified stored procedure if it exists in the container. + def replace_stored_procedure(self, sproc, body, **kwargs): + # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any] + """ + Replaces the specified stored procedure if it exists in the container. :param sproc: The ID (name) or dict representing stored procedure to be replaced. :param body: A dict-like object representing the sproc to replace. :param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the stored procedure after replace went through. - :raise `HTTPFailure`: If the replace failed or the stored procedure with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the stored procedure with given id does not exist. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReplaceStoredProcedure( - sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), sproc=body, options=request_options + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), + sproc=body, + options=request_options, + **kwargs ) - def delete_stored_procedure(self, sproc, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> None - """ Delete the specified stored procedure from the container. + def delete_stored_procedure(self, sproc, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """ + Delete the specified stored procedure from the container. :param sproc: The ID (name) or dict representing stored procedure to be deleted. :param request_options: Dictionary of additional properties to be used for the request. - :raises `HTTPFailure`: The sproc wasn't deleted successfully. If the sproc does not + :raises `CosmosHttpResponseError`: The sproc wasn't deleted successfully. If the sproc does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) self.client_connection.DeleteStoredProcedure( - sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options + sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), options=request_options, **kwargs ) def execute_stored_procedure( - self, sproc, partition_key=None, params=None, enable_script_logging=None, request_options=None + self, + sproc, # type: Union[str, Dict[str, Any]] + partition_key=None, # type: Optional[str] + params=None, # type: Optional[List[Any]] + enable_script_logging=None, # type: Optional[bool] + **kwargs # type: Any ): - # type: (Union[str, Dict[str, Any]], str, List[Any], bool, Dict[str, Any]) -> Any - """ execute the specified stored procedure. + # type: (...) -> Any + """ + Execute the specified stored procedure. 
:param sproc: The ID (name) or dict representing stored procedure to be executed. :param params: List of parameters to be passed to the stored procedure to be executed. - :param enable_script_logging: Enables or disables script logging for the current request. + :param bool enable_script_logging: Enables or disables script logging for the current request. :param partition_key: Specifies the partition key to indicate which partition the sproc should execute on. :param request_options: Dictionary of additional properties to be used for the request. - :returns: result of the executed stored procedure for the given parameters. - :raise `HTTPFailure`: If the stored procedure execution failed or if the stored procedure with + :returns: Result of the executed stored procedure for the given parameters. + :raise `CosmosHttpResponseError`: If the stored procedure execution failed or if the stored procedure with given id does not exists in the container. - + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) if partition_key is not None: request_options["partitionKey"] = ( CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key) @@ -194,37 +209,40 @@ def execute_stored_procedure( sproc_link=self._get_resource_link(sproc, ScriptType.StoredProcedure), params=params, options=request_options, + **kwargs ) - def list_triggers(self, max_item_count=None, feed_options=None): - # type: (int, Dict[str, Any]) -> QueryIterable - """ List all triggers in the container. + def list_triggers(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all triggers in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of triggers (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count - return self.client_connection.ReadTriggers(collection_link=self.container_link, options=feed_options) + return self.client_connection.ReadTriggers( + collection_link=self.container_link, options=feed_options, **kwargs + ) - def query_triggers(self, query, parameters=None, max_item_count=None, feed_options=None): - # type: (str, List, int, Dict[str, Any]) -> QueryIterable - """Return all triggers matching the given `query`. + def query_triggers(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all triggers matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of triggers (dicts). 
-
+        :rtype: Iterable[dict[str, Any]]
         """
-        if not feed_options:
-            feed_options = {}  # type: Dict[str, Any]
+        feed_options = build_options(kwargs)
         if max_item_count is not None:
             feed_options["maxItemCount"] = max_item_count

@@ -232,111 +250,113 @@ def query_triggers(self, query, parameters=None, max_item_count=None, feed_optio
             collection_link=self.container_link,
             query=query if parameters is None else dict(query=query, parameters=parameters),
             options=feed_options,
+            **kwargs
         )

-    def get_trigger(self, trigger, request_options=None):
-        # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> Dict[str, Any]
+    def get_trigger(self, trigger, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any]
         """ Get the trigger identified by `id`.

         :param trigger: The ID (name) or dict representing trigger to retrieve.
         :param request_options: Dictionary of additional properties to be used for the request.
         :returns: A dict representing the retrieved trigger.
-        :raise `HTTPFailure`: If the given trigger couldn't be retrieved.
-
+        :raise `CosmosHttpResponseError`: If the given trigger couldn't be retrieved.
+        :rtype: dict[str, Any]
         """
-        if not request_options:
-            request_options = {}  # type: Dict[str, Any]
+        request_options = build_options(kwargs)

         return self.client_connection.ReadTrigger(
-            trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options
+            trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs
         )

-    def create_trigger(self, body, request_options=None):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-        """ Create a trigger in the container.
+    def create_trigger(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> Dict[str, Any]
+        """
+        Create a trigger in the container.
+        To replace an existing trigger, use the :func:`ContainerProxy.scripts.replace_trigger` method.

         :param body: A dict-like object representing the trigger to create.
         :param request_options: Dictionary of additional properties to be used for the request.
         :returns: A dict representing the new trigger.
-        :raise `HTTPFailure`: If the given trigger couldn't be created.
-
-        To replace an existing trigger, use the :func:`Container.scripts.replace_trigger` method.
-
+        :raise `CosmosHttpResponseError`: If the given trigger couldn't be created.
+        :rtype: dict[str, Any]
         """
-        if not request_options:
-            request_options = {}  # type: Dict[str, Any]
+        request_options = build_options(kwargs)

         return self.client_connection.CreateTrigger(
-            collection_link=self.container_link, trigger=body, options=request_options
+            collection_link=self.container_link, trigger=body, options=request_options, **kwargs
         )

-    def replace_trigger(self, trigger, body, request_options=None):
-        # type: (Union[str, Dict[str, Any]], Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-        """ Replaces the specified tigger if it exists in the container.
+    def replace_trigger(self, trigger, body, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any]
+        """
+        Replaces the specified trigger if it exists in the container.

         :param trigger: The ID (name) or dict representing trigger to be replaced.
         :param body: A dict-like object representing the trigger to replace.
         :param request_options: Dictionary of additional properties to be used for the request.
         :returns: A dict representing the trigger after replace went through.
-        :raise `HTTPFailure`: If the replace failed or the trigger with given id does not exist.
- + :raise `CosmosHttpResponseError`: If the replace failed or the trigger with given id does not exist. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReplaceTrigger( - trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), trigger=body, options=request_options + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), + trigger=body, + options=request_options, + **kwargs ) - def delete_trigger(self, trigger, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> None - """ Delete the specified trigger from the container. + def delete_trigger(self, trigger, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """ + Delete the specified trigger from the container. :param trigger: The ID (name) or dict representing trigger to be deleted. :param request_options: Dictionary of additional properties to be used for the request. - :raises `HTTPFailure`: The trigger wasn't deleted successfully. If the trigger does not + :raises `CosmosHttpResponseError`: The trigger wasn't deleted successfully. If the trigger does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) self.client_connection.DeleteTrigger( - trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options + trigger_link=self._get_resource_link(trigger, ScriptType.Trigger), options=request_options, **kwargs ) - def list_user_defined_functions(self, max_item_count=None, feed_options=None): - # type: (int, Dict[str, Any]) -> QueryIterable - """ List all user defined functions in the container. + def list_user_defined_functions(self, max_item_count=None, **kwargs): + # type: (Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + List all user defined functions in the container. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of user defined functions (dicts). - + :rtype: Iterable[dict[str, Any]] """ - if not feed_options: - feed_options = {} # type: Dict[str, Any] + feed_options = build_options(kwargs) if max_item_count is not None: feed_options["maxItemCount"] = max_item_count return self.client_connection.ReadUserDefinedFunctions( - collection_link=self.container_link, options=feed_options + collection_link=self.container_link, options=feed_options, **kwargs ) - def query_user_defined_functions(self, query, parameters=None, max_item_count=None, feed_options=None): - # type: (str, List, int, Dict[str, Any]) -> QueryIterable - """Return all user defined functions matching the given `query`. + def query_user_defined_functions(self, query, parameters=None, max_item_count=None, **kwargs): + # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]] + """ + Return all user defined functions matching the given `query`. :param query: The Azure Cosmos DB SQL query to execute. :param parameters: Optional array of parameters to the query. Ignored if no query is provided. :param max_item_count: Max number of items to be returned in the enumeration operation. :param feed_options: Dictionary of additional properties to be used for the request. :returns: An Iterable of user defined functions (dicts). 
-
+        :rtype: Iterable[dict[str, Any]]
         """
-        if not feed_options:
-            feed_options = {}  # type: Dict[str, Any]
+        feed_options = build_options(kwargs)
         if max_item_count is not None:
             feed_options["maxItemCount"] = max_item_count

@@ -344,76 +364,79 @@ def query_user_defined_functions(self, query, parameters=None, max_item_count=No
             collection_link=self.container_link,
             query=query if parameters is None else dict(query=query, parameters=parameters),
             options=feed_options,
+            **kwargs
         )

-    def get_user_defined_function(self, udf, request_options=None):
-        # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> Dict[str, Any]
+    def get_user_defined_function(self, udf, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Any) -> Dict[str, Any]
         """ Get the stored procedure identified by `id`.

         :param udf: The ID (name) or dict representing udf to retrieve.
         :param request_options: Dictionary of additional properties to be used for the request.
         :returns: A dict representing the retrieved user defined function.
-        :raise `HTTPFailure`: If the given user defined function couldn't be retrieved.
-
+        :raise `CosmosHttpResponseError`: If the given user defined function couldn't be retrieved.
+        :rtype: dict[str, Any]
         """
-        if not request_options:
-            request_options = {}  # type: Dict[str, Any]
+        request_options = build_options(kwargs)

         return self.client_connection.ReadUserDefinedFunction(
-            udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options
+            udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs
         )

-    def create_user_defined_function(self, body, request_options=None):
-        # type: (Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-        """ Create a user defined function in the container.
+    def create_user_defined_function(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> Dict[str, Any]
+        """
+        Create a user defined function in the container.
+        To replace an existing udf, use the :func:`ContainerProxy.scripts.replace_user_defined_function` method.

         :param body: A dict-like object representing the udf to create.
         :param request_options: Dictionary of additional properties to be used for the request.
         :returns: A dict representing the new user defined function.
-        :raise `HTTPFailure`: If the given user defined function couldn't be created.
-
-        To replace an existing udf, use the :func:`Container.scripts.replace_user_defined_function` method.
-
+        :raise `CosmosHttpResponseError`: If the given user defined function couldn't be created.
+        :rtype: dict[str, Any]
         """
-        if not request_options:
-            request_options = {}  # type: Dict[str, Any]
+        request_options = build_options(kwargs)

         return self.client_connection.CreateUserDefinedFunction(
-            collection_link=self.container_link, udf=body, options=request_options
+            collection_link=self.container_link, udf=body, options=request_options, **kwargs
         )

-    def replace_user_defined_function(self, udf, body, request_options=None):
-        # type: (Union[str, Dict[str, Any]], Dict[str, Any], Dict[str, Any]) -> Dict[str, Any]
-        """ Replaces the specified user defined function if it exists in the container.
+    def replace_user_defined_function(self, udf, body, **kwargs):
+        # type: (Union[str, Dict[str, Any]], Dict[str, Any], Any) -> Dict[str, Any]
+        """
+        Replaces the specified user defined function if it exists in the container.

         :param udf: The ID (name) or dict representing udf to be replaced.
         :param body: A dict-like object representing the udf to replace.
:param request_options: Dictionary of additional properties to be used for the request. :returns: A dict representing the user defined function after replace went through. - :raise `HTTPFailure`: If the replace failed or the user defined function with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the user defined function with + given id does not exist. + :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) return self.client_connection.ReplaceUserDefinedFunction( - udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), udf=body, options=request_options + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), + udf=body, + options=request_options, + **kwargs ) - def delete_user_defined_function(self, udf, request_options=None): - # type: (Union[str, Dict[str, Any]], Dict[str, Any]) -> None - """ Delete the specified user defined function from the container. + def delete_user_defined_function(self, udf, **kwargs): + # type: (Union[str, Dict[str, Any]], Any) -> None + """ + Delete the specified user defined function from the container. :param udf: The ID (name) or dict representing udf to be deleted. :param request_options: Dictionary of additional properties to be used for the request. - :raises `HTTPFailure`: The udf wasn't deleted successfully. If the udf does not + :raises `CosmosHttpResponseError`: The udf wasn't deleted successfully. If the udf does not exist in the container, a `404` error is returned. - + :rtype: None """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) self.client_connection.DeleteUserDefinedFunction( - udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options + udf_link=self._get_resource_link(udf, ScriptType.UserDefinedFunction), options=request_options, **kwargs ) diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py index b33a36fc81eb..9328548206b8 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/user.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/user.py @@ -19,19 +19,27 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs + """Create, read, update and delete permissions in the Azure Cosmos DB SQL API service. """ -from typing import Any, List, Dict, Union, cast +from typing import Any, List, Dict, Union, cast, Iterable, Optional import six -from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection +from ._base import build_options from .permission import Permission -class User: +class UserProxy(object): + """ + An interface to interact with a specific user. + This class should not be instantiated directly, use :func:`DatabaseProxy.get_user_client` method. 
+    """
+
     def __init__(self, client_connection, id, database_link, properties=None):  # pylint: disable=redefined-builtin
         # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None
         self.client_connection = client_connection
@@ -52,44 +60,45 @@ def _get_permission_link(self, permission_or_id):
     def _get_properties(self):
         # type: () -> Dict[str, Any]
         if self._properties is None:
-            self.read()
+            self._properties = self.read()
         return self._properties

     @distributed_trace
-    def read(self, request_options=None, response_hook=None, **kwargs):
-        # type: (Dict[str, Any], Optional[Callable]) -> User
+    def read(self, **kwargs):
+        # type: (Any) -> Dict[str, Any]
         """
         Read user propertes.

         :param request_options: Dictionary of additional properties to be used for the request.
         :param response_hook: a callable invoked with the response metadata
-        :returns: A :class:`User` instance representing the retrieved user.
-        :raise `HTTPFailure`: If the given user couldn't be retrieved.
-
+        :returns: A dict representing the retrieved user.
+        :raise `CosmosHttpResponseError`: If the given user couldn't be retrieved.
+        :rtype: dict[str, Any]
         """
-        if not request_options:
-            request_options = {}  # type: Dict[str, Any]
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)

         self._properties = self.client_connection.ReadUser(user_link=self.user_link, options=request_options, **kwargs)

         if response_hook:
             response_hook(self.client_connection.last_response_headers, self._properties)

-        return self._properties
+        return cast('Dict[str, Any]', self._properties)

     @distributed_trace
-    def read_all_permissions(self, max_item_count=None, feed_options=None, response_hook=None, **kwargs):
-        # type: (int, Dict[str, Any], Optional[Callable]) -> QueryIterable
-        """ List all permission for the user.
+    def list_permissions(self, max_item_count=None, **kwargs):
+        # type: (Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """
+        List all permissions for the user.

         :param max_item_count: Max number of permissions to be returned in the enumeration operation.
         :param feed_options: Dictionary of additional properties to be used for the request.
         :param response_hook: a callable invoked with the response metadata
         :returns: An Iterable of permissions (dicts).
-
+        :rtype: Iterable[dict[str, Any]]
         """
-        if not feed_options:
-            feed_options = {}  # type: Dict[str, Any]
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
         if max_item_count is not None:
             feed_options["maxItemCount"] = max_item_count

@@ -106,12 +115,11 @@ def query_permissions(
         query,
         parameters=None,
         max_item_count=None,
-        feed_options=None,
-        response_hook=None,
         **kwargs
     ):
-        # type: (str, List, int, Dict[str, Any], Optional[Callable]) -> QueryIterable
-        """Return all permissions matching the given `query`.
+        # type: (str, Optional[List[str]], Optional[int], Any) -> Iterable[Dict[str, Any]]
+        """
+        Return all permissions matching the given `query`.

         :param query: The Azure Cosmos DB SQL query to execute.
         :param parameters: Optional array of parameters to the query. Ignored if no query is provided.
@@ -119,10 +127,10 @@ def query_permissions(
         :param feed_options: Dictionary of additional properties to be used for the request.
         :param response_hook: a callable invoked with the response metadata
         :returns: An Iterable of permissions (dicts).
-
+        :rtype: Iterable[dict[str, Any]]
         """
-        if not feed_options:
-            feed_options = {}  # type: Dict[str, Any]
+        feed_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)
         if max_item_count is not None:
             feed_options["maxItemCount"] = max_item_count

@@ -139,8 +147,8 @@ def query_permissions(
         return result

     @distributed_trace
-    def get_permission(self, permission, request_options=None, response_hook=None, **kwargs):
-        # type: (str, Dict[str, Any], Optional[Callable]) -> Permission
+    def get_permission(self, permission, **kwargs):
+        # type: (str, Any) -> Permission
         """ Get the permission identified by `id`.

@@ -149,43 +157,43 @@ def get_permission(self, permission, request_options=None, response_hook=None, *
         :param request_options: Dictionary of additional properties to be used for the request.
         :param response_hook: a callable invoked with the response metadata
         :returns: A dict representing the retrieved permission.
-        :raise `HTTPFailure`: If the given permission couldn't be retrieved.
-
+        :raise `CosmosHttpResponseError`: If the given permission couldn't be retrieved.
+        :rtype: dict[str, Any]
         """
-        if not request_options:
-            request_options = {}  # type: Dict[str, Any]
+        request_options = build_options(kwargs)
+        response_hook = kwargs.pop('response_hook', None)

-        permission = self.client_connection.ReadPermission(
+        permission_resp = self.client_connection.ReadPermission(
             permission_link=self._get_permission_link(permission), options=request_options, **kwargs
-        )
+        )  # type: Dict[str, str]

         if response_hook:
-            response_hook(self.client_connection.last_response_headers, permission)
+            response_hook(self.client_connection.last_response_headers, permission_resp)

         return Permission(
-            id=permission["id"],
+            id=permission_resp["id"],
             user_link=self.user_link,
-            permission_mode=permission["permissionMode"],
-            resource_link=permission["resource"],
-            properties=permission,
+            permission_mode=permission_resp["permissionMode"],
+            resource_link=permission_resp["resource"],
+            properties=permission_resp,
         )

     @distributed_trace
-    def create_permission(self, body, request_options=None, response_hook=None, **kwargs):
-        # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission
-        """ Create a permission for the user.
+    def create_permission(self, body, **kwargs):
+        # type: (Dict[str, Any], Any) -> Permission
+        """
+        Create a permission for the user.
+        To update or replace an existing permission, use the :func:`UserProxy.upsert_permission` method.

         :param body: A dict-like object representing the permission to create.
         :param request_options: Dictionary of additional properties to be used for the request.
         :param response_hook: a callable invoked with the response metadata
         :returns: A dict representing the new permission.
-        :raise `HTTPFailure`: If the given permission couldn't be created.
-
-        To update or replace an existing permision, use the :func:`User.upsert_permission` method.
-
+        :raise `CosmosHttpResponseError`: If the given permission couldn't be created.
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) permission = self.client_connection.CreatePermission( user_link=self.user_link, permission=body, options=request_options, **kwargs @@ -203,21 +211,21 @@ def create_permission(self, body, request_options=None, response_hook=None, **kw ) @distributed_trace - def upsert_permission(self, body, request_options=None, response_hook=None, **kwargs): - # type: (Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission - """ Insert or update the specified permission. + def upsert_permission(self, body, **kwargs): + # type: (Dict[str, Any], Any) -> Permission + """ + Insert or update the specified permission. + If the permission already exists in the container, it is replaced. If it does not, it is inserted. :param body: A dict-like object representing the permission to update or insert. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the upserted permission. - :raise `HTTPFailure`: If the given permission could not be upserted. - - If the permission already exists in the container, it is replaced. If it does not, it is inserted. + :raise `CosmosHttpResponseError`: If the given permission could not be upserted. + :rtype: dict[str, Any] """ - - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) permission = self.client_connection.UpsertPermission( user_link=self.user_link, permission=body, options=request_options, **kwargs @@ -235,9 +243,10 @@ def upsert_permission(self, body, request_options=None, response_hook=None, **kw ) @distributed_trace - def replace_permission(self, permission, body, request_options=None, response_hook=None, **kwargs): - # type: (str, Dict[str, Any], Dict[str, Any], Optional[Callable]) -> Permission - """ Replaces the specified permission if it exists for the user. + def replace_permission(self, permission, body, **kwargs): + # type: (str, Dict[str, Any], Any) -> Permission + """ + Replaces the specified permission if it exists for the user. :param permission: The ID (name), dict representing the properties or :class:`Permission` instance of the permission to be replaced. @@ -245,43 +254,43 @@ def replace_permission(self, permission, body, request_options=None, response_ho :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata :returns: A dict representing the permission after replace went through. - :raise `HTTPFailure`: If the replace failed or the permission with given id does not exist. - + :raise `CosmosHttpResponseError`: If the replace failed or the permission with given id does not exist. 
+ :rtype: dict[str, Any] """ - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) - permission = self.client_connection.ReplacePermission( + permission_resp = self.client_connection.ReplacePermission( permission_link=self._get_permission_link(permission), permission=body, options=request_options, **kwargs - ) + ) # type: Dict[str, str] if response_hook: - response_hook(self.client_connection.last_response_headers, permission) + response_hook(self.client_connection.last_response_headers, permission_resp) return Permission( - id=permission["id"], + id=permission_resp["id"], user_link=self.user_link, - permission_mode=permission["permissionMode"], - resource_link=permission["resource"], - properties=permission, + permission_mode=permission_resp["permissionMode"], + resource_link=permission_resp["resource"], + properties=permission_resp, ) @distributed_trace - def delete_permission(self, permission, request_options=None, response_hook=None, **kwargs): - # type: (str, Dict[str, Any], Optional[Callable]) -> None - """ Delete the specified permission from the user. + def delete_permission(self, permission, **kwargs): + # type: (str, Any) -> None + """ + Delete the specified permission from the user. :param permission: The ID (name), dict representing the properties or :class:`Permission` instance of the permission to be replaced. :param request_options: Dictionary of additional properties to be used for the request. :param response_hook: a callable invoked with the response metadata - :raises `HTTPFailure`: The permission wasn't deleted successfully. If the permission does + :raises `CosmosHttpResponseError`: The permission wasn't deleted successfully. If the permission does not exist for the user, a `404` error is returned. 
- + :rtype: None """ - - if not request_options: - request_options = {} # type: Dict[str, Any] + request_options = build_options(kwargs) + response_hook = kwargs.pop('response_hook', None) result = self.client_connection.DeletePermission( permission_link=self._get_permission_link(permission), options=request_options, **kwargs diff --git a/sdk/cosmos/azure-cosmos/samples/ChangeFeedManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/ChangeFeedManagement/Program.py index b800f818104e..1056c146bdbd 100644 --- a/sdk/cosmos/azure-cosmos/samples/ChangeFeedManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/ChangeFeedManagement/Program.py @@ -74,12 +74,8 @@ def run_sample(): # setup database for this sample try: db = client.create_database(id=DATABASE_ID) - - except errors.HTTPFailure as e: - if e.status_code == 409: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + pass # setup container for this sample try: @@ -89,11 +85,8 @@ def run_sample(): ) print('Container with id \'{0}\' created'.format(CONTAINER_ID)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) ChangeFeedManagement.CreateItems(container, 100) ChangeFeedManagement.ReadChangeFeed(container) @@ -101,14 +94,10 @@ def run_sample(): # cleanup database after sample try: client.delete_database(db) + except errors.CosmosResourceNotFoundError: + pass - except errors.CosmosError as e: - if e.status_code == 404: - pass - else: - raise errors.HTTPFailure(e.status_code) - - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. 
{0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/samples/CollectionManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/CollectionManagement/Program.py index 9bad40d4b0df..fa877125c4e0 100644 --- a/sdk/cosmos/azure-cosmos/samples/CollectionManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/CollectionManagement/Program.py @@ -92,11 +92,8 @@ def create_Container(db, id): db.create_container(id=id, partition_key=partition_key) print('Container with id \'{0}\' created'.format(id)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(id)) print("\n2.2 Create Container - With custom index policy") @@ -114,15 +111,13 @@ def create_Container(db, id): partition_key=partition_key, indexing_policy=coll['indexingPolicy'] ) + properties = container.read() print('Container with id \'{0}\' created'.format(container.id)) - print('IndexPolicy Mode - \'{0}\''.format(container.properties['indexingPolicy']['indexingMode'])) - print('IndexPolicy Automatic - \'{0}\''.format(container.properties['indexingPolicy']['automatic'])) + print('IndexPolicy Mode - \'{0}\''.format(properties['indexingPolicy']['indexingMode'])) + print('IndexPolicy Automatic - \'{0}\''.format(properties['indexingPolicy']['automatic'])) - except errors.CosmosError as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(container['id'])) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(coll['id'])) print("\n2.3 Create Container - With custom offer throughput") @@ -135,11 +130,8 @@ def create_Container(db, id): ) print('Container with id \'{0}\' created'.format(container.id)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(container.id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(coll['id'])) print("\n2.4 Create Container - With Unique keys") @@ -149,15 +141,13 @@ def create_Container(db, id): partition_key=partition_key, unique_key_policy={'uniqueKeys': [{'paths': ['/field1/field2', '/field3']}]} ) - unique_key_paths = container.properties['uniqueKeyPolicy']['uniqueKeys'][0]['paths'] + properties = container.read() + unique_key_paths = properties['uniqueKeyPolicy']['uniqueKeys'][0]['paths'] print('Container with id \'{0}\' created'.format(container.id)) print('Unique Key Paths - \'{0}\', \'{1}\''.format(unique_key_paths[0], unique_key_paths[1])) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('A container with id \'{0}\' already exists'.format(container.id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A container with id \'container_unique_keys\' already exists') print("\n2.5 Create Collection - With Partition key V2 (Default)") @@ -166,15 +156,12 @@ def create_Container(db, id): id="collection_partition_key_v2", partition_key=PartitionKey(path='/id', kind='Hash') ) - + properties = container.read() print('Container with id \'{0}\' created'.format(container.id)) - print('Partition Key - \'{0}\''.format(container.properties['partitionKey'])) + print('Partition Key - 
\'{0}\''.format(properties['partitionKey']))

-        except errors.CosmosError as e:
-            if e.status_code == 409:
-                print('A container with id \'{0}\' already exists'.format(container.id))
-            else:
-                raise errors.HTTPFailure(e.status_code)
+        except errors.CosmosResourceExistsError:
+            print('A container with id \'collection_partition_key_v2\' already exists')

         print("\n2.6 Create Collection - With Partition key V1")

@@ -183,15 +170,12 @@ def create_Container(db, id):
                 id="collection_partition_key_v1",
                 partition_key=PartitionKey(path='/id', kind='Hash', version=1)
             )
-
+            properties = container.read()
             print('Container with id \'{0}\' created'.format(container.id))
-            print('Partition Key - \'{0}\''.format(container.properties['partitionKey']))
+            print('Partition Key - \'{0}\''.format(properties['partitionKey']))

-        except errors.CosmosError as e:
-            if e.status_code == 409:
-                print('A container with id \'{0}\' already exists'.format(container.id))
-            else:
-                raise errors.HTTPFailure(e.status_code)
+        except errors.CosmosResourceExistsError:
+            print('A container with id \'collection_partition_key_v1\' already exists')

     @staticmethod
     def manage_offer_throughput(db, id):
@@ -211,11 +195,8 @@ def manage_offer_throughput(db, id):
             print('Found Offer \'{0}\' for Container \'{1}\' and its throughput is \'{2}\''.format(offer.properties['id'], container.id, offer.properties['content']['offerThroughput']))

-        except errors.HTTPFailure as e:
-            if e.status_code == 404:
-                print('A container with id \'{0}\' does not exist'.format(id))
-            else:
-                raise errors.HTTPFailure(e.status_code)
+        except errors.CosmosResourceNotFoundError:
+            print('A container with id \'{0}\' does not exist'.format(id))

         print("\n3.2 Change Offer Throughput of Container")

@@ -233,11 +214,8 @@ def read_Container(db, id):
             container = db.get_container_client(id)
             print('Container with id \'{0}\' was found, it\'s link is {1}'.format(container.id, container.container_link))

-        except errors.HTTPFailure as e:
-            if e.status_code == 404:
-                print('A container with id \'{0}\' does not exist'.format(id))
-            else:
-                raise errors.HTTPFailure(e.status_code)
+        except errors.CosmosResourceNotFoundError:
+            print('A container with id \'{0}\' does not exist'.format(id))

     @staticmethod
     def list_Containers(db):
@@ -245,7 +223,7 @@ def list_Containers(db):

         print('Containers:')

-        containers = list(db.read_all_containers())
+        containers = list(db.list_containers())

         if not containers:
             return
@@ -262,11 +240,8 @@ def delete_Container(db, id):

             print('Container with id \'{0}\' was deleted'.format(id))

-        except errors.HTTPFailure as e:
-            if e.status_code == 404:
-                print('A container with id \'{0}\' does not exist'.format(id))
-            else:
-                raise errors.HTTPFailure(e.status_code)
+        except errors.CosmosResourceNotFoundError:
+            print('A container with id \'{0}\' does not exist'.format(id))


 def run_sample():
@@ -276,11 +251,8 @@ def run_sample():
             try:
                 db = client.create_database(id=DATABASE_ID)

-            except errors.HTTPFailure as e:
-                if e.status_code == 409:
-                    pass
-                else:
-                    raise errors.HTTPFailure(e.status_code)
+            except errors.CosmosResourceExistsError:
+                db = client.get_database_client(DATABASE_ID)

             # query for a container
             ContainerManagement.find_container(db, CONTAINER_ID)
@@ -304,13 +276,10 @@ def run_sample():
             try:
                 client.delete_database(db)

-            except errors.CosmosError as e:
-                if e.status_code == 404:
-                    pass
-                else:
-                    raise errors.HTTPFailure(e.status_code)
+            except errors.CosmosResourceNotFoundError:
+                pass

-    except errors.HTTPFailure as e:
+    except errors.CosmosHttpResponseError as e:
print('\nrun_sample has caught an error. {0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/samples/DatabaseManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/DatabaseManagement/Program.py index f424c98ffa3e..f1535c93961b 100644 --- a/sdk/cosmos/azure-cosmos/samples/DatabaseManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/DatabaseManagement/Program.py @@ -68,11 +68,8 @@ def create_database(client, id): client.create_database(id=id) print('Database with id \'{0}\' created'.format(id)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('A database with id \'{0}\' already exists'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('A database with id \'{0}\' already exists'.format(id)) @staticmethod def read_database(client, id): @@ -82,11 +79,8 @@ def read_database(client, id): database = client.get_database_client(id) print('Database with id \'{0}\' was found, it\'s link is {1}'.format(id, database.database_link)) - except errors.HTTPFailure as e: - if e.status_code == 404: - print('A database with id \'{0}\' does not exist'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + print('A database with id \'{0}\' does not exist'.format(id)) @staticmethod def list_databases(client): @@ -94,7 +88,7 @@ def list_databases(client): print('Databases:') - databases = list(client.read_all_databases()) + databases = list(client.list_databases()) if not databases: return @@ -111,11 +105,8 @@ def delete_database(client, id): print('Database with id \'{0}\' was deleted'.format(id)) - except errors.HTTPFailure as e: - if e.status_code == 404: - print('A database with id \'{0}\' does not exist'.format(id)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + print('A database with id \'{0}\' does not exist'.format(id)) def run_sample(): with IDisposable(cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY} )) as client: @@ -135,7 +126,7 @@ def run_sample(): # delete database by id DatabaseManagement.delete_database(client, DATABASE_ID) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. 
{0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/samples/DocumentManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/DocumentManagement/Program.py index 51754fe8dbf9..31f2953f3dae 100644 --- a/sdk/cosmos/azure-cosmos/samples/DocumentManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/DocumentManagement/Program.py @@ -179,22 +179,16 @@ def run_sample(): try: db = client.create_database(id=DATABASE_ID) - except errors.HTTPFailure as e: - if e.status_code == 409: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + pass # setup container for this sample try: container = db.create_container(id=CONTAINER_ID, partition_key=PartitionKey(path='/id', kind='Hash')) print('Container with id \'{0}\' created'.format(CONTAINER_ID)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) ItemManagement.CreateItems(container) ItemManagement.ReadItem(container, 'SalesOrder1') @@ -208,13 +202,10 @@ def run_sample(): try: client.delete_database(db) - except errors.CosmosError as e: - if e.status_code == 404: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceNotFoundError: + pass - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. {0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/samples/IndexManagement/Program.py b/sdk/cosmos/azure-cosmos/samples/IndexManagement/Program.py index b50b5c684a96..d4a945ac699b 100644 --- a/sdk/cosmos/azure-cosmos/samples/IndexManagement/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/IndexManagement/Program.py @@ -42,7 +42,8 @@ def ObtainClient(): urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) connection_policy.SSLConfiguration.SSLCaCerts = False - return cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY}, "Session", connection_policy) + return cosmos_client.CosmosClient(HOST, MASTER_KEY, "Session", connection_policy=connection_policy) + # Query for Entity / Entities def Query_Entities(parent, entity_type, id = None): @@ -56,13 +57,13 @@ def Query_Entities(parent, entity_type, id = None): try: if entity_type == 'database': if id == None: - entities = list(parent.read_all_databases()) + entities = list(parent.list_databases()) else: entities = list(parent.query_databases(find_entity_by_id_query)) elif entity_type == 'collection': if id == None: - entities = list(parent.read_all_containers()) + entities = list(parent.list_containers()) else: entities = list(parent.query_containers(find_entity_by_id_query)) @@ -71,7 +72,7 @@ def Query_Entities(parent, entity_type, id = None): entities = list(parent.read_all_items()) else: entities = list(parent.query_items(find_entity_by_id_query)) - except errors.CosmosError as e: + except errors.AzureError as e: print("The following error occured while querying for the entity / entities ", entity_type, id if id != None else "") print(e) raise @@ -81,36 +82,36 @@ def Query_Entities(parent, entity_type, id = None): return entities[0] return None + def CreateDatabaseIfNotExists(client, database_id): try: database = Query_Entities(client, 'database', id = database_id) if database == None: - database = client.create_database(id=database_id) - return 
client.get_database_client(database['id']) - except errors.HTTPFailure as e: - if e.status_code == 409: # Move these constants to an enum - pass - else: - raise errors.HTTPFailure(e.status_code) + return client.create_database(id=database_id) + else: + return client.get_database_client(database_id) + except errors.CosmosResourceExistsError: + pass + def DeleteContainerIfExists(db, collection_id): try: db.delete_container(collection_id) print('Collection with id \'{0}\' was deleted'.format(collection_id)) - except errors.HTTPFailure as e: - if e.status_code == 404: - pass - elif e.status_code == 400: + except errors.CosmosResourceNotFoundError: + pass + except errors.CosmosHttpResponseError as e: + if e.status_code == 400: print("Bad request for collection link", collection_id) - raise - else: - raise + raise + def print_dictionary_items(dict): for k, v in dict.items(): print("{:<15}".format(k), v) print() + def FetchAllDatabases(client): databases = Query_Entities(client, 'database') print("-" * 41) @@ -119,6 +120,7 @@ def FetchAllDatabases(client): print_dictionary_items(db) print("-" * 41) + def QueryDocumentsWithCustomQuery(container, query_with_optional_parameters, message = "Document(s) found by query: "): try: results = list(container.query_items(query_with_optional_parameters, enable_cross_partition_query=True)) @@ -126,10 +128,10 @@ def QueryDocumentsWithCustomQuery(container, query_with_optional_parameters, mes for doc in results: print(doc) return results - except errors.HTTPFailure as e: - if e.status_code == 404: - print("Document doesn't exist") - elif e.status_code == 400: + except errors.CosmosResourceNotFoundError: + print("Document doesn't exist") + except errors.CosmosHttpResponseError as e: + if e.status_code == 400: # Can occur when we are trying to query on excluded paths print("Bad Request exception occured: ", e) pass @@ -138,6 +140,7 @@ def QueryDocumentsWithCustomQuery(container, query_with_optional_parameters, mes finally: print() + def ExplicitlyExcludeFromIndex(db): """ The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added. There may be scenarios where you want to exclude a specific doc from the index even though all other @@ -153,7 +156,8 @@ def ExplicitlyExcludeFromIndex(db): print(created_Container) print("\n" + "-" * 25 + "\n1. Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + properties = created_Container.read() + print_dictionary_items(properties["indexingPolicy"]) # Create a document and query on it immediately. # Will work as automatic indexing is still True @@ -190,14 +194,11 @@ def ExplicitlyExcludeFromIndex(db): # Cleanup db.delete_container(created_Container) print("\n") - - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") + def UseManualIndexing(db): """The default index policy on a DocumentContainer will AUTOMATICALLY index ALL documents added. @@ -214,10 +215,11 @@ def UseManualIndexing(db): indexing_policy={"automatic" : False}, partition_key=PARTITION_KEY ) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n2. 
Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # Create a document # Then query for that document @@ -254,14 +256,11 @@ def UseManualIndexing(db): # Cleanup db.delete_container(created_Container) print("\n") + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise def ExcludePathsFromIndex(db): """The default behavior is for Cosmos to index every attribute in every document automatically. @@ -300,9 +299,10 @@ def ExcludePathsFromIndex(db): indexing_policy=collection_to_create['indexingPolicy'], partition_key=PARTITION_KEY ) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n4. Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # The effect of the above IndexingPolicy is that only id, foo, and the subDoc/searchable are indexed doc = created_Container.create_item(body=doc_with_nested_structures) @@ -329,14 +329,11 @@ def ExcludePathsFromIndex(db): # Cleanup db.delete_container(created_Container) print("\n") + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise def RangeScanOnHashIndex(db): """When a range index is not available (i.e. Only hash or no index found on the path), comparisons queries can still @@ -365,9 +362,10 @@ def RangeScanOnHashIndex(db): indexing_policy=collection_to_create['indexingPolicy'], partition_key=PARTITION_KEY ) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n5. Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) doc1 = created_Container.create_item(body={ "id" : "dyn1", "length" : 10, "width" : 5, "height" : 15 }) doc2 = created_Container.create_item(body={ "id" : "dyn2", "length" : 7, "width" : 15 }) @@ -393,13 +391,11 @@ def RangeScanOnHashIndex(db): # Cleanup db.delete_container(created_Container) print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") + def UseRangeIndexesOnStrings(db): """Showing how range queries can be performed even on strings. @@ -458,9 +454,10 @@ def UseRangeIndexesOnStrings(db): indexing_policy=collection_definition['indexingPolicy'], partition_key=PARTITION_KEY ) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n6. 
Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) created_Container.create_item(body={ "id" : "doc1", "region" : "USA" }) created_Container.create_item(body={ "id" : "doc2", "region" : "UK" }) @@ -481,13 +478,11 @@ def UseRangeIndexesOnStrings(db): # Cleanup db.delete_container(created_Container) print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") + def PerformIndexTransformations(db): try: @@ -495,21 +490,22 @@ def PerformIndexTransformations(db): # Create a collection with default indexing policy created_Container = db.create_container(id=CONTAINER_ID, partition_key=PARTITION_KEY) + properties = created_Container.read() print(created_Container) print("\n" + "-" * 25 + "\n7. Collection created with index policy") - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # Insert some documents doc1 = created_Container.create_item(body={ "id" : "dyn1", "length" : 10, "width" : 5, "height" : 15 }) doc2 = created_Container.create_item(body={ "id" : "dyn2", "length" : 7, "width" : 15 }) doc3 = created_Container.create_item(body={ "id" : "dyn3", "length" : 2 }) - print("Three docs created with ids : ", doc1["id"], doc2["id"], doc3["id"], " with indexing mode", created_Container.properties['indexingPolicy']['indexingMode']) + print("Three docs created with ids : ", doc1["id"], doc2["id"], doc3["id"], " with indexing mode", properties['indexingPolicy']['indexingMode']) # Switch to use string & number range indexing with maximum precision. print("Changing to string & number range indexing with maximum precision (needed for Order By).") - created_Container.properties['indexingPolicy']['includedPaths'][0]['indexes'] = [{ + properties['indexingPolicy']['includedPaths'][0]['indexes'] = [{ 'kind': documents.IndexKind.Range, 'dataType': documents.DataType.String, 'precision': -1 @@ -518,34 +514,34 @@ def PerformIndexTransformations(db): created_Container = db.replace_container( container=created_Container.id, partition_key=PARTITION_KEY, - indexing_policy=created_Container.properties['indexingPolicy'] + indexing_policy=properties['indexingPolicy'] ) + properties = created_Container.read() # Check progress and wait for completion - should be instantaneous since we have only a few documents, but larger # collections will take time. - print_dictionary_items(created_Container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # Now exclude a path from indexing to save on storage space. 
print("Now excluding the path /length/ to save on storage space") - created_Container.properties['indexingPolicy']['excludedPaths'] = [{"path" : "/length/*"}] + properties['indexingPolicy']['excludedPaths'] = [{"path" : "/length/*"}] created_Container = db.replace_container( container=created_Container.id, partition_key=PARTITION_KEY, - indexing_policy=created_Container.properties['indexingPolicy'] + indexing_policy=properties['indexingPolicy'] ) - print_dictionary_items(created_Container.properties["indexingPolicy"]) + properties = created_Container.read() + print_dictionary_items(properties["indexingPolicy"]) # Cleanup db.delete_container(created_Container) print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") + def PerformMultiOrderbyQuery(db): try: @@ -590,11 +586,11 @@ def PerformMultiOrderbyQuery(db): indexing_policy=indexing_policy, partition_key=PARTITION_KEY ) - + properties = created_container.read() print(created_container) print("\n" + "-" * 25 + "\n8. Collection created with index policy") - print_dictionary_items(created_container.properties["indexingPolicy"]) + print_dictionary_items(properties["indexingPolicy"]) # Insert some documents doc1 = created_container.create_item(body={"id": "doc1", "numberField": 1, "stringField": "1", "numberField2": 1, "stringField2": "1"}) @@ -632,109 +628,11 @@ def PerformMultiOrderbyQuery(db): # Cleanup db.delete_container(created_container) print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise - -def PerformMultiOrderbyQuery(client, database_id): - try: - DeleteContainerIfExists(client, database_id, COLLECTION_ID) - database_link = GetDatabaseLink(database_id) - - # Create a collection with composite indexes - indexingPolicy = { - "compositeIndexes": [ - [ - { - "path": "/numberField", - "order": "ascending" - }, - { - "path": "/stringField", - "order": "descending" - } - ], - [ - { - "path": "/numberField", - "order": "descending" - }, - { - "path": "/stringField", - "order": "ascending" - }, - { - "path": "/numberField2", - "order": "descending" - }, - { - "path": "/stringField2", - "order": "ascending" - } - ] - ] - } - - container_definition = { - 'id': COLLECTION_ID, - 'indexingPolicy': indexingPolicy - } - - created_container = client.CreateContainer(database_link, container_definition) - - print(created_container) - - print("\n" + "-" * 25 + "\n8. 
Collection created with index policy") - print_dictionary_items(created_container["indexingPolicy"]) - - # Insert some documents - collection_link = GetContainerLink(database_id, COLLECTION_ID) - doc1 = client.CreateItem(collection_link, {"id": "doc1", "numberField": 1, "stringField": "1", "numberField2": 1, "stringField2": "1"}) - doc2 = client.CreateItem(collection_link, {"id": "doc2", "numberField": 1, "stringField": "1", "numberField2": 1, "stringField2": "2"}) - doc3 = client.CreateItem(collection_link, {"id": "doc3", "numberField": 1, "stringField": "1", "numberField2": 2, "stringField2": "1"}) - doc4 = client.CreateItem(collection_link, {"id": "doc4", "numberField": 1, "stringField": "1", "numberField2": 2, "stringField2": "2"}) - doc5 = client.CreateItem(collection_link, {"id": "doc5", "numberField": 1, "stringField": "2", "numberField2": 1, "stringField2": "1"}) - doc6 = client.CreateItem(collection_link, {"id": "doc6", "numberField": 1, "stringField": "2", "numberField2": 1, "stringField2": "2"}) - doc7 = client.CreateItem(collection_link, {"id": "doc7", "numberField": 1, "stringField": "2", "numberField2": 2, "stringField2": "1"}) - doc8 = client.CreateItem(collection_link, {"id": "doc8", "numberField": 1, "stringField": "2", "numberField2": 2, "stringField2": "2"}) - doc9 = client.CreateItem(collection_link, {"id": "doc9", "numberField": 2, "stringField": "1", "numberField2": 1, "stringField2": "1"}) - doc10 = client.CreateItem(collection_link, {"id": "doc10", "numberField": 2, "stringField": "1", "numberField2": 1, "stringField2": "2"}) - doc11 = client.CreateItem(collection_link, {"id": "doc11", "numberField": 2, "stringField": "1", "numberField2": 2, "stringField2": "1"}) - doc12 = client.CreateItem(collection_link, {"id": "doc12", "numberField": 2, "stringField": "1", "numberField2": 2, "stringField2": "2"}) - doc13 = client.CreateItem(collection_link, {"id": "doc13", "numberField": 2, "stringField": "2", "numberField2": 1, "stringField2": "1"}) - doc14 = client.CreateItem(collection_link, {"id": "doc14", "numberField": 2, "stringField": "2", "numberField2": 1, "stringField2": "2"}) - doc15 = client.CreateItem(collection_link, {"id": "doc15", "numberField": 2, "stringField": "2", "numberField2": 2, "stringField2": "1"}) - doc16 = client.CreateItem(collection_link, {"id": "doc16", "numberField": 2, "stringField": "2", "numberField2": 2, "stringField2": "2"}) - - print("Query documents and Order by 1st composite index: Ascending numberField and Descending stringField:") - - query = { - "query": "SELECT * FROM r ORDER BY r.numberField ASC, r.stringField DESC", - } - QueryDocumentsWithCustomQuery(client, collection_link, query) - - print("Query documents and Order by inverted 2nd composite index -") - print("Ascending numberField, Descending stringField, Ascending numberField2, Descending stringField2") - - query = { - "query": "SELECT * FROM r ORDER BY r.numberField ASC, r.stringField DESC, r.numberField2 ASC, r.stringField2 DESC", - } - QueryDocumentsWithCustomQuery(client, collection_link, query) + except errors.CosmosResourceExistsError: + print("Entity already exists") + except errors.CosmosResourceNotFoundError: + print("Entity doesn't exist") - # Cleanup - client.DeleteContainer(collection_link) - print("\n") - except errors.HTTPFailure as e: - if e.status_code == 409: - print("Entity already exists") - elif e.status_code == 404: - print("Entity doesn't exist") - else: - raise def RunIndexDemo(): try: @@ -766,10 +664,7 @@ def RunIndexDemo(): # 8. 
Perform Multi Orderby queries using composite indexes PerformMultiOrderbyQuery(created_db) - # 8. Perform Multi Orderby queries using composite indexes - PerformMultiOrderbyQuery(client, DATABASE_ID) - - except errors.CosmosError as e: + except errors.AzureError as e: raise e if __name__ == '__main__': @@ -777,4 +672,4 @@ def RunIndexDemo(): RunIndexDemo() except Exception as e: - print("Top level Error: args:{0}, message:N/A".format(e.args)) + print("Top level Error: args:{0}, message:N/A".format(e.args)) diff --git a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py index a714278dc71d..40f64026ebec 100644 --- a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py +++ b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/ConflictWorker.py @@ -27,11 +27,8 @@ def initialize_async(self): database = None try: database = create_client.ReadDatabase("dbs/" + self.database_name) - except errors.CosmosError as e: - if e.status_code == StatusCodes.NOT_FOUND: - print("database not found, needs to be created.") - else: - raise e + except errors.CosmosResourceNotFoundError: + print("database not found, needs to be created.") if not database: database = {'id': self.database_name} @@ -122,20 +119,15 @@ def initialize_async(self): } try: lww_sproc = create_client.CreateStoredProcedure("dbs/" + self.database_name+ "/colls/" + self.udp_collection_name, lww_sproc) - except errors.CosmosError as e: - if e.status_code == StatusCodes.CONFLICT: - return - raise e + except errors.CosmosResourceExistsError: + return def try_create_document_collection (self, client, database, collection): read_collection = None try: read_collection = client.ReadContainer("dbs/" + database['id'] + "/colls/" + collection['id']) - except errors.CosmosError as e: - if e.status_code == StatusCodes.NOT_FOUND: - print("collection not found, needs to be created.") - else: - raise errors + except errors.CosmosResourceNotFoundError: + print("collection not found, needs to be created.") if read_collection == None: collection['partitionKey'] = {'paths': ['/id'],'kind': 'Hash'} @@ -481,33 +473,25 @@ def run_delete_conflict_on_UDP_async(self): def try_insert_document(self, client, collection_uri, document): try: return client.CreateItem(collection_uri, document) - except errors.CosmosError as e: - if e.status_code == StatusCodes.CONFLICT: - return None - raise e + except errors.CosmosResourceExistsError: + return None def try_update_document(self, client, collection_uri, document, options): try: options['partitionKey'] = document['id'] return client.ReplaceItem(collection_uri + "/docs/" + document['id'], document, options); - except errors.CosmosError as e: - if (e.status_code == StatusCodes.PRECONDITION_FAILED or - e.status_code == StatusCodes.NOT_FOUND): - # Lost synchronously or no document yet. No conflict is induced. - return None - raise e + except (errors.CosmosResourceNotFoundError, errors.CosmosAccessConditionFailedError): + # Lost synchronously or no document yet. No conflict is induced. + return None def try_delete_document(self, client, collection_uri, document, options): try: options['partitionKey'] = document['id'] client.DeleteItem(collection_uri + "/docs/" + document['id'], options) return document - except errors.CosmosError as e: - if (e.status_code == StatusCodes.PRECONDITION_FAILED or - e.status_code == StatusCodes.NOT_FOUND): - #Lost synchronously. No conflict is induced. 
- return None - raise e + except (errors.CosmosResourceNotFoundError, errors.CosmosAccessConditionFailedError): + #Lost synchronously. No conflict is induced. + return None def try_update_or_delete_document(self, client, collection_uri, conflict_document, options): if int(conflict_document['regionId']) % 2 == 1: @@ -607,16 +591,14 @@ def validate_LWW_async_internal(self, client, conflict_document, has_delete_conf (conflict_document[0]['id'], client.ReadEndpoint)) time.sleep(0.5) - except errors.CosmosError as e: - if e.status_code == StatusCodes.NOT_FOUND: - print("Delete conflict won @ %s" % client.ReadEndpoint) - return - else: - - self.trace_error("Delete conflict for document %s didnt win @ %s" % - (conflict_document[0]['id'], client.ReadEndpoint)) + except errors.CosmosResourceNotFoundError: + print("Delete conflict won @ %s" % client.ReadEndpoint) + return + except errors.CosmosHttpResponseError: + self.trace_error("Delete conflict for document %s didnt win @ %s" % + (conflict_document[0]['id'], client.ReadEndpoint)) - time.sleep(0.5) + time.sleep(0.5) winner_document = None @@ -640,7 +622,7 @@ def validate_LWW_async_internal(self, client, conflict_document, has_delete_conf (int(winner_document["regionId"]), client.WriteEndpoint)) time.sleep(0.5) - except errors.CosmosError as e: + except errors.AzureError as e: self.trace_error("Winner document from region %d is not found @ %s, retrying..." % (int(winner_document["regionId"]), client.WriteEndpoint)) @@ -673,15 +655,13 @@ def validate_UDP_async_internal(self, client, conflict_document, has_delete_conf (conflict_document[0]['id'], client.ReadEndpoint)) time.sleep(0.5) - except errors.CosmosError as e: - if e.status_code == StatusCodes.NOT_FOUND: - print("Delete conflict won @ %s" % client.ReadEndpoint) - return - else: - self.trace_error("Delete conflict for document %s didnt win @ %s" % - (conflict_document[0]['id'], client.ReadEndpoint)) - - time.sleep(0.5) + except errors.CosmosResourceNotFoundError: + print("Delete conflict won @ %s" % client.ReadEndpoint) + return + except errors.CosmosHttpResponseError: + self.trace_error("Delete conflict for document %s didnt win @ %s" % + (conflict_document[0]['id'], client.ReadEndpoint)) + time.sleep(0.5) winner_document = None @@ -705,10 +685,9 @@ def validate_UDP_async_internal(self, client, conflict_document, has_delete_conf (int(winner_document['regionId']), client.WriteEndpoint)) time.sleep(0.5) - except errors.CosmosError as e: + except errors.AzureError: self.trace_error("Winner document from region %d is not found @ %s, retrying..." 
% (int(winner_document['regionId']), client.WriteEndpoint)) - time.sleep(0.5) def trace_error(self, message): diff --git a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py index 8b97f0899838..453f8caa9fe9 100644 --- a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py +++ b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/MultiMasterScenario.py @@ -27,7 +27,12 @@ def __init__(self): connection_policy.UseMultipleWriteLocations = True connection_policy.PreferredLocations = [region] - client = cosmos_client_connection.CosmosClientConnection(self.account_endpoint, {'masterKey': self.account_key}, connection_policy, documents.ConsistencyLevel.Session) + client = cosmos_client_connection.CosmosClientConnection( + self.account_endpoint, + {'masterKey': self.account_key}, + connection_policy, + documents.ConsistencyLevel.Session + ) self.workers.append(Worker(client, self.database_name, self.basic_collection_name)) diff --git a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/Worker.py b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/Worker.py index 28eeaefe5291..38af9a920314 100644 --- a/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/Worker.py +++ b/sdk/cosmos/azure-cosmos/samples/MultiMasterOperations/Worker.py @@ -62,10 +62,10 @@ def delete_all_async(self): while doc: try: self.client.DeleteItem(doc['_self'], {'partitionKey': doc['id']}) - except errors.CosmosError as e: - if e.status_code != StatusCodes.NOT_FOUND: - print("Error occurred while deleting document from %s" % self.client.WriteEndpoint) - else: - raise e + except errors.CosmosResourceNotFoundError: + raise + except errors.CosmosHttpResponseError as e: + print("Error occurred while deleting document from %s" % self.client.WriteEndpoint) + doc = next(it, None) print("Deleted all documents from region %s" % self.client.WriteEndpoint) \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos/samples/NonPartitionedCollectionOperations/Program.py b/sdk/cosmos/azure-cosmos/samples/NonPartitionedCollectionOperations/Program.py index baaf8f5e41b8..5be4ad1345a9 100644 --- a/sdk/cosmos/azure-cosmos/samples/NonPartitionedCollectionOperations/Program.py +++ b/sdk/cosmos/azure-cosmos/samples/NonPartitionedCollectionOperations/Program.py @@ -97,7 +97,7 @@ def CreateNonPartitionedCollection(db): # python 3 compatible: convert data from byte to unicode string data = data.decode('utf-8') data = json.loads(data) - created_collection = db.get_container_client(data['id']) + created_collection = db.get_container_client("mycoll") # Create a document in the non partitioned collection using the rest API and older version resource_url = base_url_split[0] + ":" + base_url_split[1] + ":" + base_url_split[2].split("/")[0] \ @@ -122,7 +122,7 @@ def CreateNonPartitionedCollection(db): data = data.decode('utf-8') data = json.loads(data) created_document = data - return created_collection, created_document + return created_collection, "SaledOrder0" @staticmethod def get_authorization(client, verb, resource_id_or_fullname, resource_type, headers): @@ -162,7 +162,7 @@ def ReadItem(container, doc_id): print('\n1.2 Reading Item by Id\n') # Note that Reads require a partition key to be spcified. 
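The hunks below read items that were created without a partition key, passing the `NonePartitionKeyValue` sentinel and the item id positionally. A minimal sketch of that call shape, assuming `container` is an existing `ContainerProxy` and `doc_id` a known item id (both hypothetical here):

```Python
import azure.cosmos.partition_key as partition_key

# Items created in a legacy, non-partitioned collection are addressed with the
# NonePartitionKeyValue sentinel; `container` and `doc_id` are placeholders.
response = container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue)
print('Item read by Id {0}'.format(doc_id))
print('Account Number: {0}'.format(response.get('account_number')))
```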
- response = container.read_item(id=doc_id, partition_key=partition_key.NonePartitionKeyValue) + response = container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) print('Item read by Id {0}'.format(doc_id)) print('Account Number: {0}'.format(response.get('account_number'))) @@ -175,7 +175,7 @@ def ReadItems(container): # NOTE: Use MaxItemCount on Options to control how many items come back per trip to the server # Important to handle throttles whenever you are doing operations such as this that might # result in a 429 (throttled request) - item_list = list(container.list_items(max_item_count=10)) + item_list = list(container.read_all_items(max_item_count=10)) print('Found {0} items'.format(item_list.__len__())) @@ -201,7 +201,7 @@ def QueryItems(container, doc_id): def ReplaceItem(container, doc_id): print('\n1.5 Replace an Item\n') - read_item = container.read_item(id=doc_id, partition_key=partition_key.NonePartitionKeyValue) + read_item = container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) read_item['subtotal'] = read_item['subtotal'] + 1 response = container.replace_item(item=read_item, body=read_item) @@ -211,7 +211,7 @@ def ReplaceItem(container, doc_id): def UpsertItem(container, doc_id): print('\n1.6 Upserting an item\n') - read_item = container.read_item(id=doc_id, partition_key=partition_key.NonePartitionKeyValue) + read_item = container.read_item(doc_id, partition_key=partition_key.NonePartitionKeyValue) read_item['subtotal'] = read_item['subtotal'] + 1 response = container.upsert_item(body=read_item) @@ -285,26 +285,19 @@ def run_sample(): # setup database for this sample try: db = client.create_database(id=DATABASE_ID) - - except errors.HTTPFailure as e: - if e.status_code == 409: - pass - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + db = client.get_database_client(DATABASE_ID) # setup container for this sample try: container, document = ItemManagement.CreateNonPartitionedCollection(db) print('Container with id \'{0}\' created'.format(CONTAINER_ID)) - except errors.HTTPFailure as e: - if e.status_code == 409: - print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) - else: - raise errors.HTTPFailure(e.status_code) + except errors.CosmosResourceExistsError: + print('Container with id \'{0}\' was found'.format(CONTAINER_ID)) # Read Item created in non partitioned collection using older API version - ItemManagement.ReadItem(container, document['id']) + ItemManagement.ReadItem(container, document) ItemManagement.CreateItems(container) ItemManagement.ReadItems(container) ItemManagement.QueryItems(container, 'SalesOrder1') @@ -315,14 +308,10 @@ def run_sample(): # cleanup database after sample try: client.delete_database(db) + except errors.CosmosResourceNotFoundError: + pass - except errors.CosmosError as e: - if e.status_code == 404: - pass - else: - raise errors.HTTPFailure(e.status_code) - - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: print('\nrun_sample has caught an error. 
{0}'.format(e.message)) finally: diff --git a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py index c8dbdf131109..a10343334234 100644 --- a/sdk/cosmos/azure-cosmos/test/aggregate_tests.py +++ b/sdk/cosmos/azure-cosmos/test/aggregate_tests.py @@ -31,7 +31,7 @@ import azure.cosmos.cosmos_client as cosmos_client import azure.cosmos.documents as documents import test_config -from azure.cosmos.errors import HTTPFailure +from azure.cosmos.errors import CosmosHttpResponseError from azure.cosmos.partition_key import PartitionKey pytestmark = pytest.mark.cosmosEmulator @@ -64,8 +64,8 @@ def _setup(): "'masterKey' and 'host' at the top of this class to run the " "tests.") - mcs.client = cosmos_client.CosmosClient(_config.host, - {'masterKey': _config.master_key}, "Session", _config.connection_policy) + mcs.client = cosmos_client.CosmosClient( + _config.host, _config.master_key, "Session", connection_policy=_config.connection_policy) created_db = test_config._test_config.create_database_if_not_exist(mcs.client) mcs.created_collection = _create_collection(created_db) @@ -221,19 +221,21 @@ def invokeNext(): self.assertRaises(StopIteration, invokeNext) ###################################### - # test fetch_next_block() behavior + # test by_page() behavior ###################################### - fetched_res = result_iterable.fetch_next_block() + page_iter = result_iterable.by_page() + fetched_res = list(next(page_iter)) fetched_size = len(fetched_res) self.assertEqual(fetched_size, 1) self.assertEqual(fetched_res[0], expected) # no more results will be returned - self.assertEqual(result_iterable.fetch_next_block(), []) + with self.assertRaises(StopIteration): + next(page_iter) if isinstance(expected, Exception): - self.assertRaises(HTTPFailure, _verify_result) + self.assertRaises(CosmosHttpResponseError, _verify_result) else: _verify_result() diff --git a/sdk/cosmos/azure-cosmos/test/conftest.py b/sdk/cosmos/azure-cosmos/test/conftest.py index e0548dcc6449..cbb2191bc775 100644 --- a/sdk/cosmos/azure-cosmos/test/conftest.py +++ b/sdk/cosmos/azure-cosmos/test/conftest.py @@ -39,7 +39,7 @@ def delete_database(): masterKey = config.masterKey connectionPolicy = config.connectionPolicy try: - client = cosmos_client.CosmosClient(host, {'masterKey': masterKey}, "Session", connectionPolicy) + client = cosmos_client.CosmosClient(host, masterKey, "Session", connection_policy=connectionPolicy) # This is to soft-fail the teardown while cosmos tests are not running automatically except Exception: pass @@ -48,9 +48,8 @@ def delete_database(): for database_id in database_ids_to_delete: try: client.delete_database(database_id) - except errors.HTTPFailure as e: - if e.status_code != StatusCodes.NOT_FOUND: - raise e + except errors.CosmosResourceNotFoundError: + pass del database_ids_to_delete[:] print("Clean up completed!") diff --git a/sdk/cosmos/azure-cosmos/test/crud_tests.py b/sdk/cosmos/azure-cosmos/test/crud_tests.py index 0f093271d7c3..14529aede445 100644 --- a/sdk/cosmos/azure-cosmos/test/crud_tests.py +++ b/sdk/cosmos/azure-cosmos/test/crud_tests.py @@ -84,7 +84,7 @@ def __AssertHTTPFailureWithStatus(self, status_code, func, *args, **kwargs): try: func(*args, **kwargs) self.assertFalse(True, 'function should fail.') - except errors.HTTPFailure as inst: + except errors.CosmosHttpResponseError as inst: self.assertEqual(inst.status_code, status_code) @classmethod @@ -95,22 +95,22 @@ def setUpClass(cls): "You must specify your Azure Cosmos account values for " 
"'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.databaseForTest = cls.configs.create_database_if_not_exist(cls.client) def setUp(self): - self.client = cosmos_client.CosmosClient(self.host, {'masterKey':self.masterKey}, "Session", - self.connectionPolicy) + self.client = cosmos_client.CosmosClient(self.host, self.masterKey, "Session", + connection_policy=self.connectionPolicy) def test_database_crud(self): # read databases. - databases = list(self.client.read_all_databases()) + databases = list(self.client.list_databases()) # create a database. before_create_databases_count = len(databases) database_id = str(uuid.uuid4()) created_db = self.client.create_database(database_id) self.assertEqual(created_db.id, database_id) # Read databases after creation. - databases = list(self.client.read_all_databases()) + databases = list(self.client.list_databases()) self.assertEqual(len(databases), before_create_databases_count + 1, 'create should increase the number of databases') @@ -121,8 +121,7 @@ def test_database_crud(self): {'name': '@id', 'value': database_id} ] })) - self.assert_(databases, - 'number of results for the query should be > 0') + self.assertTrue(databases, 'number of results for the query should be > 0') # read database. self.client.get_database_client(created_db.id) @@ -149,12 +148,12 @@ def test_database_level_offer_throughput(self): # Verify offer throughput for database offer = created_db.read_offer() - self.assertEquals(offer.offer_throughput, offer_throughput) + self.assertEqual(offer.offer_throughput, offer_throughput) # Update database offer throughput new_offer_throughput = 2000 offer = created_db.replace_throughput(new_offer_throughput) - self.assertEquals(offer.offer_throughput, new_offer_throughput) + self.assertEqual(offer.offer_throughput, new_offer_throughput) def test_sql_query_crud(self): # create two databases. 
@@ -185,7 +184,7 @@ def test_sql_query_crud(self): def test_collection_crud(self): created_db = self.databaseForTest - collections = list(created_db.read_all_containers()) + collections = list(created_db.list_containers()) # create a collection before_create_collections_count = len(collections) collection_id = 'test_collection_crud ' + str(uuid.uuid4()) @@ -205,7 +204,7 @@ def test_collection_crud(self): self.assertEqual('consistent', created_properties['indexingPolicy']['indexingMode']) # read collections after creation - collections = list(created_db.read_all_containers()) + collections = list(created_db.list_containers()) self.assertEqual(len(collections), before_create_collections_count + 1, 'create should increase the number of collections') @@ -303,7 +302,7 @@ def test_partitioned_collection_partition_key_extraction(self): # create document without partition key being specified created_document = created_collection.create_item(body=document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], '["WA"]') + self.assertEqual(self.last_headers[1], '["WA"]') del self.last_headers[:] self.assertEqual(created_document.get('id'), document_definition.get('id')) @@ -320,7 +319,7 @@ def test_partitioned_collection_partition_key_extraction(self): # Create document with partitionkey not present as a leaf level property but a dict created_document = created_collection1.create_item(document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], [{}]) + self.assertEqual(self.last_headers[1], [{}]) del self.last_headers[:] #self.assertEqual(options['partitionKey'], documents.Undefined) @@ -336,7 +335,7 @@ def test_partitioned_collection_partition_key_extraction(self): # Create document with partitionkey not present in the document created_document = created_collection2.create_item(document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], [{}]) + self.assertEqual(self.last_headers[1], [{}]) del self.last_headers[:] #self.assertEqual(options['partitionKey'], documents.Undefined) @@ -362,7 +361,7 @@ def test_partitioned_collection_partition_key_extraction_special_chars(self): _retry_utility.ExecuteFunction = self._MockExecuteFunction created_document = created_collection1.create_item(body=document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], '["val1"]') + self.assertEqual(self.last_headers[1], '["val1"]') del self.last_headers[:] collection_definition2 = { @@ -390,7 +389,7 @@ def test_partitioned_collection_partition_key_extraction_special_chars(self): # create document without partition key being specified created_document = created_collection2.create_item(body=document_definition) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.last_headers[1], '["val2"]') + self.assertEqual(self.last_headers[1], '["val2"]') del self.last_headers[:] created_db.delete_container(created_collection1.id) @@ -540,7 +539,7 @@ def test_partitioned_collection_permissions(self): resource_tokens[urllib.quote(read_collection.id)] = (read_permission.properties['_token']) restricted_client = cosmos_client.CosmosClient( - CRUDTests.host, {'resourceTokens': resource_tokens}, "Session", CRUDTests.connectionPolicy) + CRUDTests.host, resource_tokens, "Session", connection_policy=CRUDTests.connectionPolicy) document_definition = {'id': 
'document1', 'key': 1 @@ -693,7 +692,7 @@ def test_partitioned_collection_conflict_crud_and_query(self): ) # Read conflict feed doesn't requires partitionKey to be specified as it's a cross partition thing - conflictlist = list(created_collection.read_all_conflicts()) + conflictlist = list(created_collection.list_conflicts()) self.assertEqual(0, len(conflictlist)) # delete conflict here will return resource not found(404) since there is no conflict here @@ -809,9 +808,7 @@ def test_document_crud(self): created_collection.replace_item, replaced_document['id'], replaced_document, - None, - None, - {'type': 'IfMatch', 'condition': old_etag}, + if_match=old_etag, ) # should pass for most recent etag @@ -1043,14 +1040,14 @@ def test_user_crud(self): # create database db = self.databaseForTest # list users - users = list(db.read_all_users()) + users = list(db.list_users()) before_create_count = len(users) # create user user_id = 'new user' + str(uuid.uuid4()) user = db.create_user(body={'id': user_id}) self.assertEqual(user.id, user_id, 'user id error') # list users after creation - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count + 1) # query users results = list(db.query_users( @@ -1087,7 +1084,7 @@ def test_user_upsert(self): db = self.databaseForTest # read users and check count - users = list(db.read_all_users()) + users = list(db.list_users()) before_create_count = len(users) # create user using Upsert API @@ -1098,7 +1095,7 @@ def test_user_upsert(self): self.assertEqual(user.id, user_id, 'user id error') # read users after creation and verify updated count - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count + 1) # Should replace the user since it already exists, there is no public property to change here @@ -1111,7 +1108,7 @@ def test_user_upsert(self): 'user id should remain same') # read users after upsert and verify count doesn't increases again - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count + 1) user_properties = user.read() @@ -1125,7 +1122,7 @@ def test_user_upsert(self): self.assertEqual(new_user.id, user.id, 'user id error') # read users after upsert and verify count increases - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count + 2) # delete users @@ -1133,7 +1130,7 @@ def test_user_upsert(self): db.delete_user(new_user.id) # read users after delete and verify count remains the same - users = list(db.read_all_users()) + users = list(db.list_users()) self.assertEqual(len(users), before_create_count) def test_permission_crud(self): @@ -1143,7 +1140,7 @@ def test_permission_crud(self): # create user user = db.create_user(body={'id': 'new user' + str(uuid.uuid4())}) # list permissions - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) before_create_count = len(permissions) permission = { 'id': 'new permission', @@ -1156,7 +1153,7 @@ def test_permission_crud(self): 'new permission', 'permission id error') # list permissions after creation - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count + 1) # query permissions results = list(user.query_permissions( @@ -1165,7 +1162,7 @@ def test_permission_crud(self): {'name': '@id', 'value': permission.id} ] )) - self.assert_(results) + 
self.assertTrue(results) # replace permission change_permission = permission.properties.copy() @@ -1196,7 +1193,7 @@ def test_permission_upsert(self): user = db.create_user(body={'id': 'new user' + str(uuid.uuid4())}) # read permissions and check count - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) before_create_count = len(permissions) permission_definition = { @@ -1214,7 +1211,7 @@ def test_permission_upsert(self): 'permission id error') # read permissions after creation and verify updated count - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count + 1) # update permission mode @@ -1233,7 +1230,7 @@ def test_permission_upsert(self): 'permissionMode should change') # read permissions and verify count doesn't increases again - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count + 1) # update permission id @@ -1256,7 +1253,7 @@ def test_permission_upsert(self): 'permission resource should be same') # read permissions and verify count increases - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count + 2) # delete permissions @@ -1264,7 +1261,7 @@ def test_permission_upsert(self): user.delete_permission(new_permission.id) # read permissions and verify count remains the same - permissions = list(user.read_all_permissions()) + permissions = list(user.list_permissions()) self.assertEqual(len(permissions), before_create_count) def test_authorization(self): @@ -1350,15 +1347,15 @@ def __SetupEntities(client): return entities # Client without any authorization will fail. - client = cosmos_client.CosmosClient(CRUDTests.host, {}, "Session", CRUDTests.connectionPolicy) + client = cosmos_client.CosmosClient(CRUDTests.host, {}, "Session", connection_policy=CRUDTests.connectionPolicy) self.__AssertHTTPFailureWithStatus(StatusCodes.UNAUTHORIZED, list, - client.read_all_databases()) + client.list_databases()) # Client with master key. 
client = cosmos_client.CosmosClient(CRUDTests.host, - {'masterKey': CRUDTests.masterKey}, + CRUDTests.masterKey, "Session", - CRUDTests.connectionPolicy) + connection_policy=CRUDTests.connectionPolicy) # setup entities entities = __SetupEntities(client) resource_tokens = {} @@ -1367,7 +1364,7 @@ def __SetupEntities(client): resource_tokens[entities['doc1']['id']]= ( entities['permissionOnColl1'].properties['_token']) col1_client = cosmos_client.CosmosClient( - CRUDTests.host, {'resourceTokens': resource_tokens},"Session", CRUDTests.connectionPolicy) + CRUDTests.host, resource_tokens,"Session", connection_policy=CRUDTests.connectionPolicy) db = entities['db'] old_client_connection = db.client_connection @@ -1397,7 +1394,9 @@ def __SetupEntities(client): 'Expected to read children using parent permissions') col2_client = cosmos_client.CosmosClient( CRUDTests.host, - {'permissionFeed': [entities['permissionOnColl2'].properties]}, "Session", CRUDTests.connectionPolicy) + [entities['permissionOnColl2'].properties], + "Session", + connection_policy=CRUDTests.connectionPolicy) doc = { 'CustomProperty1': 'BBBBBB', 'customProperty2': 1000, @@ -1452,7 +1451,7 @@ def test_trigger_crud(self): {'name': '@id', 'value': trigger_definition['id']} ] )) - self.assert_(triggers) + self.assertTrue(triggers) # replace trigger change_trigger = trigger.copy() @@ -1510,7 +1509,7 @@ def test_udf_crud(self): {'name': '@id', 'value': udf_definition['id']} ] )) - self.assert_(results) + self.assertTrue(results) # replace udf change_udf = udf.copy() udf['body'] = 'function() {var x = 20;}' @@ -1855,7 +1854,11 @@ def test_create_indexing_policy_with_composite_and_spatial_indexes(self): created_container = db.create_container( id='composite_index_spatial_index' + str(uuid.uuid4()), indexing_policy=indexing_policy, - partition_key=PartitionKey(path='/id', kind='Hash') + partition_key=PartitionKey(path='/id', kind='Hash'), + headers={"Foo":"bar"}, + user_agent="blah", + user_agent_overwrite=True, + logging_enable=True ) created_properties = created_container.read() read_indexing_policy = created_properties['indexingPolicy'] @@ -1954,7 +1957,7 @@ def test_client_request_timeout(self): connection_policy.RequestTimeout = 0 with self.assertRaises(Exception): # client does a getDatabaseAccount on initialization, which will time out - cosmos_client.CosmosClient(CRUDTests.host, {'masterKey': CRUDTests.masterKey}, "Session", connection_policy) + cosmos_client.CosmosClient(CRUDTests.host, CRUDTests.masterKey, "Session", connection_policy=connection_policy) def test_query_iterable_functionality(self): def __create_resources(client): @@ -2013,18 +2016,15 @@ def __create_resources(client): # Get query results page by page. 
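The paging assertions below switch from `fetch_next_block()` to the page-level iterator returned by `by_page()`; exhausting that iterator raises `StopIteration` instead of returning an empty block. A minimal sketch of the new pattern, assuming `container` is an existing `ContainerProxy`:

```Python
# read_all_items now returns an azure.core.paging.ItemPaged instance.
results = container.read_all_items(max_item_count=2)

page_iter = results.by_page()       # secondary, page-level iterator
first_page = list(next(page_iter))  # at most 2 items per page here
print('First page has {0} item(s)'.format(len(first_page)))

# Remaining pages; iteration simply stops once the results are exhausted.
for page in page_iter:
    print('Next page has {0} item(s)'.format(len(list(page))))
```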
results = resources['coll'].read_all_items(max_item_count=2) - first_block = results.fetch_next_block() - self.assertEqual(2, - len(first_block), - 'First block should have 2 entries.') + + page_iter = results.by_page() + first_block = list(next(page_iter)) + self.assertEqual(2, len(first_block), 'First block should have 2 entries.') self.assertEqual(resources['doc1']['id'], first_block[0]['id']) self.assertEqual(resources['doc2']['id'], first_block[1]['id']) - self.assertEqual(1, - len(results.fetch_next_block()), - 'Second block should have 1 entry.') - self.assertEqual(0, - len(results.fetch_next_block()), - 'Then its empty.') + self.assertEqual(1, len(list(next(page_iter))), 'Second block should have 1 entry.') + with self.assertRaises(StopIteration): + next(page_iter) def test_trigger_functionality(self): triggers_in_collection1 = [ @@ -2388,7 +2388,7 @@ def test_id_case_validation(self): collection_id2 = 'SampleCollection ' + uuid_string # Verify that no collections exist - collections = list(created_db.read_all_containers()) + collections = list(created_db.list_containers()) number_of_existing_collections = len(collections) # create 2 collections with different casing of IDs @@ -2404,7 +2404,7 @@ def test_id_case_validation(self): partition_key=PartitionKey(path='/id', kind='Hash') ) - collections = list(created_db.read_all_containers()) + collections = list(created_db.list_containers()) # verify if a total of 2 collections got created self.assertEqual(len(collections), number_of_existing_collections + 2) @@ -2452,40 +2452,40 @@ def test_get_resource_with_dictionary_and_object(self): # read database with id read_db = self.client.get_database_client(created_db.id) - self.assertEquals(read_db.id, created_db.id) + self.assertEqual(read_db.id, created_db.id) # read database with instance read_db = self.client.get_database_client(created_db) - self.assertEquals(read_db.id, created_db.id) + self.assertEqual(read_db.id, created_db.id) # read database with properties read_db = self.client.get_database_client(created_db.read()) - self.assertEquals(read_db.id, created_db.id) + self.assertEqual(read_db.id, created_db.id) created_container = self.configs.create_multi_partition_collection_if_not_exist(self.client) # read container with id read_container = created_db.get_container_client(created_container.id) - self.assertEquals(read_container.id, created_container.id) + self.assertEqual(read_container.id, created_container.id) # read container with instance read_container = created_db.get_container_client(created_container) - self.assertEquals(read_container.id, created_container.id) + self.assertEqual(read_container.id, created_container.id) # read container with properties created_properties = created_container.read() read_container = created_db.get_container_client(created_properties) - self.assertEquals(read_container.id, created_container.id) + self.assertEqual(read_container.id, created_container.id) created_item = created_container.create_item({'id':'1' + str(uuid.uuid4())}) # read item with id read_item = created_container.read_item(item=created_item['id'], partition_key=created_item['id']) - self.assertEquals(read_item['id'], created_item['id']) + self.assertEqual(read_item['id'], created_item['id']) # read item with properties read_item = created_container.read_item(item=created_item, partition_key=created_item['id']) - self.assertEquals(read_item['id'], created_item['id']) + self.assertEqual(read_item['id'], created_item['id']) created_sproc = 
created_container.scripts.create_stored_procedure({ 'id': 'storedProcedure' + str(uuid.uuid4()), @@ -2494,11 +2494,11 @@ def test_get_resource_with_dictionary_and_object(self): # read sproc with id read_sproc = created_container.scripts.get_stored_procedure(created_sproc['id']) - self.assertEquals(read_sproc['id'], created_sproc['id']) + self.assertEqual(read_sproc['id'], created_sproc['id']) # read sproc with properties read_sproc = created_container.scripts.get_stored_procedure(created_sproc) - self.assertEquals(read_sproc['id'], created_sproc['id']) + self.assertEqual(read_sproc['id'], created_sproc['id']) created_trigger = created_container.scripts.create_trigger({ 'id': 'sample trigger' + str(uuid.uuid4()), @@ -2509,11 +2509,11 @@ def test_get_resource_with_dictionary_and_object(self): # read trigger with id read_trigger = created_container.scripts.get_trigger(created_trigger['id']) - self.assertEquals(read_trigger['id'], created_trigger['id']) + self.assertEqual(read_trigger['id'], created_trigger['id']) # read trigger with properties read_trigger = created_container.scripts.get_trigger(created_trigger) - self.assertEquals(read_trigger['id'], created_trigger['id']) + self.assertEqual(read_trigger['id'], created_trigger['id']) created_udf = created_container.scripts.create_user_defined_function({ 'id': 'sample udf' + str(uuid.uuid4()), @@ -2522,11 +2522,11 @@ def test_get_resource_with_dictionary_and_object(self): # read udf with id read_udf = created_container.scripts.get_user_defined_function(created_udf['id']) - self.assertEquals(created_udf['id'], read_udf['id']) + self.assertEqual(created_udf['id'], read_udf['id']) # read udf with properties read_udf = created_container.scripts.get_user_defined_function(created_udf) - self.assertEquals(created_udf['id'], read_udf['id']) + self.assertEqual(created_udf['id'], read_udf['id']) created_user = created_db.create_user({ 'id': 'user' + str(uuid.uuid4()) @@ -2534,16 +2534,16 @@ def test_get_resource_with_dictionary_and_object(self): # read user with id read_user = created_db.get_user_client(created_user.id) - self.assertEquals(read_user.id, created_user.id) + self.assertEqual(read_user.id, created_user.id) # read user with instance read_user = created_db.get_user_client(created_user) - self.assertEquals(read_user.id, created_user.id) + self.assertEqual(read_user.id, created_user.id) # read user with properties created_user_properties = created_user.read() read_user = created_db.get_user_client(created_user_properties) - self.assertEquals(read_user.id, created_user.id) + self.assertEqual(read_user.id, created_user.id) created_permission = created_user.create_permission({ 'id': 'all permission' + str(uuid.uuid4()), @@ -2554,15 +2554,15 @@ def test_get_resource_with_dictionary_and_object(self): # read permission with id read_permission = created_user.get_permission(created_permission.id) - self.assertEquals(read_permission.id, created_permission.id) + self.assertEqual(read_permission.id, created_permission.id) # read permission with instance read_permission = created_user.get_permission(created_permission) - self.assertEquals(read_permission.id, created_permission.id) + self.assertEqual(read_permission.id, created_permission.id) # read permission with properties read_permission = created_user.get_permission(created_permission.properties) - self.assertEquals(read_permission.id, created_permission.id) + self.assertEqual(read_permission.id, created_permission.id) def _MockExecuteFunction(self, function, *args, **kwargs): 
self.last_headers.append(args[4].headers[HttpHeaders.PartitionKey] diff --git a/sdk/cosmos/azure-cosmos/test/encoding_tests.py b/sdk/cosmos/azure-cosmos/test/encoding_tests.py index d48f8ef9e58e..1f0c23e7a334 100644 --- a/sdk/cosmos/azure-cosmos/test/encoding_tests.py +++ b/sdk/cosmos/azure-cosmos/test/encoding_tests.py @@ -25,7 +25,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.created_collection = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client) diff --git a/sdk/cosmos/azure-cosmos/test/env_test.py b/sdk/cosmos/azure-cosmos/test/env_test.py index 5210a0bc7ce2..62c0f81e665d 100644 --- a/sdk/cosmos/azure-cosmos/test/env_test.py +++ b/sdk/cosmos/azure-cosmos/test/env_test.py @@ -60,7 +60,7 @@ def setUpClass(cls): os.environ["COSMOS_ENDPOINT"] = cls.host os.environ["COSMOS_KEY"] = cls.masterKey - cls.client = cosmos_client.CosmosClient(url=cls.host, auth={'masterKey': cls.masterKey }, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(url=cls.host, credential=cls.masterKey, connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = test_config._test_config.create_single_partition_collection_if_not_exist(cls.client) diff --git a/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py b/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py index 5c07812625bf..886e842be63c 100644 --- a/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py +++ b/sdk/cosmos/azure-cosmos/test/globaldb_mock_tests.py @@ -152,10 +152,14 @@ def MockExecuteFunction(self, function, *args, **kwargs): else: self.endpoint_discovery_retry_count += 1 location_changed = True - raise errors.HTTPFailure(StatusCodes.FORBIDDEN, "Forbidden", {'x-ms-substatus' : 3}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.FORBIDDEN, + message="Forbidden", + response=test_config.FakeResponse({'x-ms-substatus' : 3})) def MockGetDatabaseAccountStub(self, endpoint): - raise errors.HTTPFailure(StatusCodes.SERVICE_UNAVAILABLE, "Service unavailable") + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.SERVICE_UNAVAILABLE, message="Service unavailable") def MockCreateDatabase(self, client, database): self.OriginalExecuteFunction = _retry_utility.ExecuteFunction @@ -166,7 +170,7 @@ def test_globaldb_endpoint_discovery_retry_policy(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = True - write_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_mock_tests.write_location_host, {'masterKey': Test_globaldb_mock_tests.masterKey}, connection_policy) + write_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_mock_tests.write_location_host, Test_globaldb_mock_tests.masterKey, connection_policy) self.assertEqual(write_location_client._global_endpoint_manager.WriteEndpoint, Test_globaldb_mock_tests.write_location_host) self.MockCreateDatabase(write_location_client, { 'id': 'mock database' }) @@ -177,7 +181,7 @@ def test_globaldb_database_account_unavailable(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = True - client = 
cosmos_client_connection.CosmosClientConnection(Test_globaldb_mock_tests.host, {'masterKey': Test_globaldb_mock_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_mock_tests.host, Test_globaldb_mock_tests.masterKey, connection_policy) self.assertEqual(client._global_endpoint_manager.WriteEndpoint, Test_globaldb_mock_tests.write_location_host) self.assertEqual(client._global_endpoint_manager.ReadEndpoint, Test_globaldb_mock_tests.write_location_host) diff --git a/sdk/cosmos/azure-cosmos/test/globaldb_tests.py b/sdk/cosmos/azure-cosmos/test/globaldb_tests.py index e0e8eb41db7e..fc9e5d194f38 100644 --- a/sdk/cosmos/azure-cosmos/test/globaldb_tests.py +++ b/sdk/cosmos/azure-cosmos/test/globaldb_tests.py @@ -72,7 +72,7 @@ def __AssertHTTPFailureWithStatus(self, status_code, sub_status, func, *args, ** try: func(*args, **kwargs) self.assertFalse(True, 'function should fail.') - except errors.HTTPFailure as inst: + except errors.CosmosHttpResponseError as inst: self.assertEqual(inst.status_code, status_code) self.assertEqual(inst.sub_status, sub_status) @@ -86,7 +86,7 @@ def setUpClass(cls): "tests.") def setUp(self): - self.client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}) + self.client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey) # Create the test database only when it's not already present query_iterable = self.client.QueryDatabases('SELECT * FROM root r WHERE r.id=\'' + Test_globaldb_tests.test_database_id + '\'') @@ -114,7 +114,7 @@ def test_globaldb_read_write_endpoints(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = False - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) document_definition = { 'id': 'doc', 'name': 'sample document', @@ -140,7 +140,7 @@ def test_globaldb_read_write_endpoints(self): connection_policy.EnableEndpointDiscovery = True document_definition['id'] = 'doc2' - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) # When EnableEndpointDiscovery is True, WriteEndpoint is set to the write endpoint created_document = client.CreateItem(self.test_coll['_self'], document_definition) @@ -163,7 +163,7 @@ def test_globaldb_endpoint_discovery(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = False - read_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.read_location_host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + read_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.read_location_host, Test_globaldb_tests.masterKey, connection_policy) document_definition = { 'id': 'doc', 'name': 'sample document', @@ -187,7 +187,7 @@ def test_globaldb_endpoint_discovery(self): })) connection_policy.EnableEndpointDiscovery = True - read_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.read_location_host, 
{'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + read_location_client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.read_location_host, Test_globaldb_tests.masterKey, connection_policy) # CreateDocument call will go to the WriteEndpoint as EnableEndpointDiscovery is set to True and client will resolve the right endpoint based on the operation created_document = read_location_client.CreateItem(self.test_coll['_self'], document_definition) @@ -197,7 +197,7 @@ def test_globaldb_preferred_locations(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = True - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) document_definition = { 'id': 'doc', 'name': 'sample document', @@ -220,7 +220,7 @@ def test_globaldb_preferred_locations(self): self.assertEqual(client.ReadEndpoint, Test_globaldb_tests.write_location_host) connection_policy.PreferredLocations = [Test_globaldb_tests.read_location2] - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) document_definition['id'] = 'doc2' created_document = client.CreateItem(self.test_coll['_self'], document_definition) @@ -242,28 +242,28 @@ def test_globaldb_endpoint_assignments(self): connection_policy = documents.ConnectionPolicy() connection_policy.EnableEndpointDiscovery = False - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) # When EnableEndpointDiscovery is set to False, both Read and Write Endpoints point to endpoint passed while creating the client instance self.assertEqual(client._global_endpoint_manager.WriteEndpoint, Test_globaldb_tests.host) self.assertEqual(client._global_endpoint_manager.ReadEndpoint, Test_globaldb_tests.host) connection_policy.EnableEndpointDiscovery = True - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) # If no preferred locations is set, we return the write endpoint as ReadEndpoint for better latency performance, write endpoint is set as expected self.assertEqual(client._global_endpoint_manager.WriteEndpoint, Test_globaldb_tests.write_location_host) self.assertEqual(client._global_endpoint_manager.ReadEndpoint, Test_globaldb_tests.write_location_host) connection_policy.PreferredLocations = [Test_globaldb_tests.read_location2] - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) # Test that the preferred location is set as ReadEndpoint instead of default write endpoint when no preference is 
set self.assertEqual(client._global_endpoint_manager.WriteEndpoint, Test_globaldb_tests.write_location_host) self.assertEqual(client._global_endpoint_manager.ReadEndpoint, Test_globaldb_tests.read_location2_host) def test_globaldb_update_locations_cache(self): - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey) writable_locations = [{'name' : Test_globaldb_tests.write_location, 'databaseAccountEndpoint' : Test_globaldb_tests.write_location_host}] readable_locations = [{'name' : Test_globaldb_tests.read_location, 'databaseAccountEndpoint' : Test_globaldb_tests.read_location_host}, {'name' : Test_globaldb_tests.read_location2, 'databaseAccountEndpoint' : Test_globaldb_tests.read_location2_host}] @@ -307,7 +307,7 @@ def test_globaldb_update_locations_cache(self): connection_policy = documents.ConnectionPolicy() connection_policy.PreferredLocations = [Test_globaldb_tests.read_location2] - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) write_endpoint, read_endpoint = client._global_endpoint_manager.UpdateLocationsCache(writable_locations, readable_locations) @@ -321,7 +321,7 @@ def test_globaldb_update_locations_cache(self): connection_policy = documents.ConnectionPolicy() connection_policy.PreferredLocations = [Test_globaldb_tests.read_location2] - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) write_endpoint, read_endpoint = client._global_endpoint_manager.UpdateLocationsCache(writable_locations, readable_locations) @@ -333,7 +333,7 @@ def test_globaldb_update_locations_cache(self): readable_locations = [{'name' : Test_globaldb_tests.read_location, 'databaseAccountEndpoint' : Test_globaldb_tests.read_location_host}, {'name' : Test_globaldb_tests.read_location2, 'databaseAccountEndpoint' : Test_globaldb_tests.read_location2_host}] connection_policy.EnableEndpointDiscovery = False - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}, connection_policy) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey, connection_policy) write_endpoint, read_endpoint = client._global_endpoint_manager.UpdateLocationsCache(writable_locations, readable_locations) @@ -357,7 +357,7 @@ def test_globaldb_locational_endpoint_parser(self): self.assertEqual(locational_endpoint, 'https://contoso-EastUS.documents.azure.com:443/') def test_globaldb_endpoint_discovery_retry_policy_mock(self): - client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, {'masterKey': Test_globaldb_tests.masterKey}) + client = cosmos_client_connection.CosmosClientConnection(Test_globaldb_tests.host, Test_globaldb_tests.masterKey) self.OriginalExecuteFunction = _retry_utility.ExecuteFunction _retry_utility.ExecuteFunction = self._MockExecuteFunction @@ -385,7 +385,11 @@ def 
test_globaldb_endpoint_discovery_retry_policy_mock(self): _retry_utility.ExecuteFunction = self.OriginalExecuteFunction def _MockExecuteFunction(self, function, *args, **kwargs): - raise errors.HTTPFailure(StatusCodes.FORBIDDEN, "Write Forbidden", {'x-ms-substatus' : SubStatusCodes.WRITE_FORBIDDEN}) + response = test_config.FakeResponse({'x-ms-substatus' : SubStatusCodes.WRITE_FORBIDDEN}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.FORBIDDEN, + message="Write Forbidden", + response=response) def _MockGetDatabaseAccount(self, url_conection): database_account = documents.DatabaseAccount() diff --git a/sdk/cosmos/azure-cosmos/test/location_cache_tests.py b/sdk/cosmos/azure-cosmos/test/location_cache_tests.py index a2772ce270e8..7b5479bf22bf 100644 --- a/sdk/cosmos/azure-cosmos/test/location_cache_tests.py +++ b/sdk/cosmos/azure-cosmos/test/location_cache_tests.py @@ -11,10 +11,12 @@ import azure.cosmos.errors as errors from azure.cosmos.http_constants import StatusCodes, SubStatusCodes, HttpHeaders from azure.cosmos import _retry_utility +import test_config import six pytestmark = pytest.mark.cosmosEmulator + class RefreshThread(threading.Thread): def __init__(self, group=None, target=None, name=None, args=(), kwargs=None, verbose=None): @@ -87,7 +89,7 @@ def validate_retry_on_session_not_availabe_with_endpoint_discovery_disabled(self else: client.CreateItem("dbs/mydb/colls/mycoll/", {'id':'1'}) self.fail() - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: # not retried self.assertEqual(self.counter, 1) self.counter = 0 @@ -99,7 +101,11 @@ def validate_retry_on_session_not_availabe_with_endpoint_discovery_disabled(self def _MockExecuteFunctionSessionReadFailureOnce(self, function, *args, **kwargs): self.counter += 1 - raise errors.HTTPFailure(StatusCodes.NOT_FOUND, "Read Session not available", {HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + response = test_config.FakeResponse({HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.NOT_FOUND, + message="Read Session not available", + response=response) def test_validate_retry_on_session_not_availabe_with_endpoint_discovery_enabled(self): # sequence of chosen endpoints: @@ -128,7 +134,7 @@ def validate_retry_on_session_not_availabe(self, is_preferred_locations_list_emp try: client.ReadItem("dbs/mydb/colls/mycoll/docs/1") - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: # not retried self.assertEqual(self.counter, 4 if use_multiple_write_locations else 2) self.counter = 0 @@ -160,7 +166,11 @@ def _MockExecuteFunctionSessionReadFailureTwice(self, function, *args, **kwargs) self.assertTrue(request.should_clear_session_token_on_session_read_failure) self.assertEqual(expected_endpoint, request.location_endpoint_to_route) self.counter += 1 - raise errors.HTTPFailure(StatusCodes.NOT_FOUND, "Read Session not available", {HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + response = test_config.FakeResponse({HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.NOT_FOUND, + message="Read Session not available", + response=response) def test_validate_location_cache(self): self.original_get_database_account = cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount diff --git a/sdk/cosmos/azure-cosmos/test/multiOrderbyTests.py 
b/sdk/cosmos/azure-cosmos/test/multiOrderbyTests.py index f10493281e99..b68db27b520e 100644 --- a/sdk/cosmos/azure-cosmos/test/multiOrderbyTests.py +++ b/sdk/cosmos/azure-cosmos/test/multiOrderbyTests.py @@ -61,7 +61,7 @@ class MultiOrderbyTests(unittest.TestCase): @classmethod def setUpClass(cls): - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, "Session", cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, "Session", connection_policy=cls.connectionPolicy) cls.database = test_config._test_config.create_database_if_not_exist(cls.client) def generate_multi_orderby_item(self): diff --git a/sdk/cosmos/azure-cosmos/test/multimaster_tests.py b/sdk/cosmos/azure-cosmos/test/multimaster_tests.py index e58dff33a4ae..efe8c3f7619d 100644 --- a/sdk/cosmos/azure-cosmos/test/multimaster_tests.py +++ b/sdk/cosmos/azure-cosmos/test/multimaster_tests.py @@ -36,8 +36,8 @@ def _validate_tentative_write_headers(self): connectionPolicy = MultiMasterTests.connectionPolicy connectionPolicy.UseMultipleWriteLocations = True - client = cosmos_client.CosmosClient(MultiMasterTests.host, {'masterKey': MultiMasterTests.masterKey}, "Session", - connectionPolicy) + client = cosmos_client.CosmosClient(MultiMasterTests.host, MultiMasterTests.masterKey, "Session", + connection_policy=connectionPolicy) created_db = client.create_database(id='multi_master_tests ' + str(uuid.uuid4())) diff --git a/sdk/cosmos/azure-cosmos/test/orderby_tests.py b/sdk/cosmos/azure-cosmos/test/orderby_tests.py index 7b84ed856aaa..ba1eb99b3c17 100644 --- a/sdk/cosmos/azure-cosmos/test/orderby_tests.py +++ b/sdk/cosmos/azure-cosmos/test/orderby_tests.py @@ -22,6 +22,7 @@ import unittest import uuid import pytest +from azure.core.paging import ItemPaged import azure.cosmos.documents as documents from azure.cosmos.partition_key import PartitionKey import azure.cosmos.cosmos_client as cosmos_client @@ -61,7 +62,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, "Session", cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, "Session", connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = CrossPartitionTopOrderByTest.create_collection(cls.client, cls.created_db) cls.collection_link = cls.GetDocumentCollectionLink(cls.created_db, cls.created_collection) @@ -431,8 +432,15 @@ def find_docs_by_partition_key_range_id(self): collection_id = base.GetResourceIdOrFullNameFromLink(self.collection_link) def fetch_fn(options): return self.client.client_connection.QueryFeed(path, collection_id, query, options, r['id']) - docResultsIterable = query_iterable.QueryIterable(self.client.client_connection, query, options, fetch_fn, self.collection_link) - + docResultsIterable = ItemPaged( + self.client.client_connection, + query, + options, + fetch_function=fetch_fn, + collection_link=self.collection_link, + page_iterator_class=query_iterable.QueryIterable + ) + docs = list(docResultsIterable) self.assertFalse(r['id'] in docs_by_partition_key_range_id) docs_by_partition_key_range_id[r['id']] = docs @@ -448,7 +456,8 @@ def execute_query_and_validate_results(self, query, expected_ordered_ids): max_item_count=page_size ) - self.assertTrue(isinstance(result_iterable, query_iterable.QueryIterable)) + self.assertTrue(isinstance(result_iterable, 
ItemPaged)) + self.assertEqual(result_iterable._page_iterator_class, query_iterable.QueryIterable) ###################################### # test next() behavior @@ -466,14 +475,15 @@ def invokeNext(): self.assertRaises(StopIteration, invokeNext) ###################################### - # test fetch_next_block() behavior + # test by_page() behavior ###################################### results = {} cnt = 0 - while True: - fetched_res = result_iterable.fetch_next_block() + page_iter = result_iterable.by_page() + for page in page_iter: + fetched_res = list(page) fetched_size = len(fetched_res) - + for item in fetched_res: self.assertEqual(item['id'], expected_ordered_ids[cnt]) results[cnt] = item @@ -487,12 +497,14 @@ def invokeNext(): else: #cnt > expected_number_of_results self.fail("more results than expected") + # validate the number of collected results self.assertEqual(len(results), len(expected_ordered_ids)) # no more results will be returned - self.assertEqual(result_iterable.fetch_next_block(), []) + with self.assertRaises(StopIteration): + next(page_iter) @classmethod def create_collection(self, client, created_db): diff --git a/sdk/cosmos/azure-cosmos/test/partition_key_tests.py b/sdk/cosmos/azure-cosmos/test/partition_key_tests.py index cccd4ca3ae97..026b1f30ae56 100644 --- a/sdk/cosmos/azure-cosmos/test/partition_key_tests.py +++ b/sdk/cosmos/azure-cosmos/test/partition_key_tests.py @@ -49,7 +49,7 @@ def tearDownClass(cls): @classmethod def setUpClass(cls): - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, "Session", cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, "Session", connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client) @@ -128,25 +128,25 @@ def test_non_partitioned_collection_operations(self): # Pass partitionKey.Empty as partition key to access documents from a single partition collection with v 2018-12-31 SDK read_item = created_container.read_item(self.created_document['id'], partition_key=partition_key.NonePartitionKeyValue) - self.assertEquals(read_item['id'], self.created_document['id']) + self.assertEqual(read_item['id'], self.created_document['id']) document_definition = {'id': str(uuid.uuid4())} created_item = created_container.create_item(body=document_definition) - self.assertEquals(created_item['id'], document_definition['id']) + self.assertEqual(created_item['id'], document_definition['id']) read_item = created_container.read_item(created_item['id'], partition_key=partition_key.NonePartitionKeyValue) - self.assertEquals(read_item['id'], created_item['id']) + self.assertEqual(read_item['id'], created_item['id']) document_definition_for_replace = {'id': str(uuid.uuid4())} replaced_item = created_container.replace_item(created_item['id'], body=document_definition_for_replace) - self.assertEquals(replaced_item['id'], document_definition_for_replace['id']) + self.assertEqual(replaced_item['id'], document_definition_for_replace['id']) upserted_item = created_container.upsert_item(body=document_definition) - self.assertEquals(upserted_item['id'], document_definition['id']) + self.assertEqual(upserted_item['id'], document_definition['id']) # one document was created during setup, one with create (which was replaced) and one with upsert items = list(created_container.query_items("SELECT * from c", 
partition_key=partition_key.NonePartitionKeyValue)) - self.assertEquals(len(items), 3) + self.assertEqual(len(items), 3) document_created_by_sproc_id = 'testDoc' sproc = { @@ -170,7 +170,7 @@ def test_non_partitioned_collection_operations(self): # 3 previous items + 1 created from the sproc items = list(created_container.read_all_items()) - self.assertEquals(len(items), 4) + self.assertEqual(len(items), 4) created_container.delete_item(upserted_item['id'], partition_key=partition_key.NonePartitionKeyValue) created_container.delete_item(replaced_item['id'], partition_key=partition_key.NonePartitionKeyValue) @@ -178,13 +178,13 @@ def test_non_partitioned_collection_operations(self): created_container.delete_item(self.created_document['id'], partition_key=partition_key.NonePartitionKeyValue) items = list(created_container.read_all_items()) - self.assertEquals(len(items), 0) + self.assertEqual(len(items), 0) def test_multi_partition_collection_read_document_with_no_pk(self): document_definition = {'id': str(uuid.uuid4())} self.created_collection.create_item(body=document_definition) read_item = self.created_collection.read_item(item=document_definition['id'], partition_key=partition_key.NonePartitionKeyValue) - self.assertEquals(read_item['id'], document_definition['id']) + self.assertEqual(read_item['id'], document_definition['id']) self.created_collection.delete_item(item=document_definition['id'], partition_key=partition_key.NonePartitionKeyValue) def test_hash_v2_partition_key_definition(self): @@ -193,7 +193,7 @@ def test_hash_v2_partition_key_definition(self): partition_key=partition_key.PartitionKey(path="/id", kind="Hash") ) created_container_properties = created_container.read() - self.assertEquals(created_container_properties['partitionKey']['version'], 2) + self.assertEqual(created_container_properties['partitionKey']['version'], 2) self.created_db.delete_container(created_container) created_container = self.created_db.create_container( @@ -201,7 +201,7 @@ def test_hash_v2_partition_key_definition(self): partition_key=partition_key.PartitionKey(path="/id", kind="Hash", version=2) ) created_container_properties = created_container.read() - self.assertEquals(created_container_properties['partitionKey']['version'], 2) + self.assertEqual(created_container_properties['partitionKey']['version'], 2) self.created_db.delete_container(created_container) def test_hash_v1_partition_key_definition(self): @@ -210,5 +210,5 @@ def test_hash_v1_partition_key_definition(self): partition_key=partition_key.PartitionKey(path="/id", kind="Hash", version=1) ) created_container_properties = created_container.read() - self.assertEquals(created_container_properties['partitionKey']['version'], 1) + self.assertEqual(created_container_properties['partitionKey']['version'], 1) self.created_db.delete_container(created_container) diff --git a/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py b/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py index f8138b6d5a8a..9b73a4f738d9 100644 --- a/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_execution_context_tests.py @@ -58,9 +58,9 @@ def setUpClass(cls): "tests.") cls.client = cosmos_client.CosmosClient(QueryExecutionContextEndToEndTests.host, - {'masterKey': QueryExecutionContextEndToEndTests.masterKey}, - "Session", - QueryExecutionContextEndToEndTests.connectionPolicy) + QueryExecutionContextEndToEndTests.masterKey, + "Session", + 
connection_policy=QueryExecutionContextEndToEndTests.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = cls.create_collection(cls.created_db) cls.document_definitions = [] diff --git a/sdk/cosmos/azure-cosmos/test/query_tests.py b/sdk/cosmos/azure-cosmos/test/query_tests.py index ca6c8a377fc0..862263f07d59 100644 --- a/sdk/cosmos/azure-cosmos/test/query_tests.py +++ b/sdk/cosmos/azure-cosmos/test/query_tests.py @@ -25,7 +25,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.created_db = cls.config.create_database_if_not_exist(cls.client) def test_first_and_last_slashes_trimmed_for_query_string (self): @@ -56,7 +56,7 @@ def test_query_change_feed(self): iter_list = list(query_iterable) self.assertEqual(len(iter_list), 0) self.assertTrue('etag' in created_collection.client_connection.last_response_headers) - self.assertNotEquals(created_collection.client_connection.last_response_headers['etag'], '') + self.assertNotEqual(created_collection.client_connection.last_response_headers['etag'], '') # Read change feed from beginning should return an empty list query_iterable = created_collection.query_items_change_feed( @@ -67,7 +67,7 @@ def test_query_change_feed(self): self.assertEqual(len(iter_list), 0) self.assertTrue('etag' in created_collection.client_connection.last_response_headers) continuation1 = created_collection.client_connection.last_response_headers['etag'] - self.assertNotEquals(continuation1, '') + self.assertNotEqual(continuation1, '') # Create a document. Read change feed should return be able to read that document document_definition = {'pk': 'pk', 'id':'doc1'} @@ -81,8 +81,8 @@ def test_query_change_feed(self): self.assertEqual(iter_list[0]['id'], 'doc1') self.assertTrue('etag' in created_collection.client_connection.last_response_headers) continuation2 = created_collection.client_connection.last_response_headers['etag'] - self.assertNotEquals(continuation2, '') - self.assertNotEquals(continuation2, continuation1) + self.assertNotEqual(continuation2, '') + self.assertNotEqual(continuation2, continuation1) # Create two new documents. Verify that change feed contains the 2 new documents # with page size 1 and page size 100 @@ -105,7 +105,7 @@ def test_query_change_feed(self): actual_ids += item['id'] + '.' self.assertEqual(actual_ids, expected_ids) - # verify fetch_next_block + # verify by_page # the options is not copied, therefore it need to be restored query_iterable = created_collection.query_items_change_feed( partition_key_range_id=pkRangeId, @@ -115,19 +115,16 @@ def test_query_change_feed(self): count = 0 expected_count = 2 all_fetched_res = [] - while (True): - fetched_res = query_iterable.fetch_next_block() - self.assertEquals(len(fetched_res), min(pageSize, expected_count - count)) + for page in query_iterable.by_page(): + fetched_res = list(page) + self.assertEqual(len(fetched_res), min(pageSize, expected_count - count)) count += len(fetched_res) all_fetched_res.extend(fetched_res) - if len(fetched_res) == 0: - break + actual_ids = '' for item in all_fetched_res: actual_ids += item['id'] + '.' 
self.assertEqual(actual_ids, expected_ids) - # verify there's no more results - self.assertEquals(query_iterable.fetch_next_block(), []) # verify reading change feed from the beginning query_iterable = created_collection.query_items_change_feed( @@ -138,7 +135,7 @@ def test_query_change_feed(self): it = query_iterable.__iter__() for i in range(0, len(expected_ids)): doc = next(it) - self.assertEquals(doc['id'], expected_ids[i]) + self.assertEqual(doc['id'], expected_ids[i]) self.assertTrue('etag' in created_collection.client_connection.last_response_headers) continuation3 = created_collection.client_connection.last_response_headers['etag'] @@ -203,11 +200,10 @@ def validate_query_requests_count(self, query_iterable, expected_count): self.count = 0 self.OriginalExecuteFunction = retry_utility.ExecuteFunction retry_utility.ExecuteFunction = self._MockExecuteFunction - block = query_iterable.fetch_next_block() - while block: - block = query_iterable.fetch_next_block() + for block in query_iterable.by_page(): + assert len(list(block)) != 0 retry_utility.ExecuteFunction = self.OriginalExecuteFunction - self.assertEquals(self.count, expected_count) + self.assertEqual(self.count, expected_count) self.count = 0 def _MockExecuteFunction(self, function, *args, **kwargs): diff --git a/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py b/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py index f25dfce77e78..77b50ea7e3d7 100644 --- a/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py +++ b/sdk/cosmos/azure-cosmos/test/retry_policy_tests.py @@ -58,7 +58,7 @@ def __AssertHTTPFailureWithStatus(self, status_code, func, *args, **kwargs): try: func(*args, **kwargs) self.assertFalse(True, 'function should fail.') - except errors.HTTPFailure as inst: + except errors.CosmosHttpResponseError as inst: self.assertEqual(inst.status_code, status_code) @classmethod @@ -70,7 +70,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, "Session", cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, "Session", connection_policy=cls.connectionPolicy) cls.created_collection = test_config._test_config.create_single_partition_collection_if_not_exist(cls.client) cls.retry_after_in_milliseconds = 1000 @@ -88,7 +88,7 @@ def test_resource_throttle_retry_policy_default_retry_after(self): try: self.created_collection.create_item(body=document_definition) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, StatusCodes.TOO_MANY_REQUESTS) self.assertEqual(connection_policy.RetryOptions.MaxRetryAttemptCount, self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryCount]) self.assertGreaterEqual( self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], @@ -110,7 +110,7 @@ def test_resource_throttle_retry_policy_fixed_retry_after(self): try: self.created_collection.create_item(body=document_definition) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, StatusCodes.TOO_MANY_REQUESTS) self.assertEqual(connection_policy.RetryOptions.MaxRetryAttemptCount, self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryCount]) self.assertGreaterEqual(self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], @@ -133,7 
+133,7 @@ def test_resource_throttle_retry_policy_max_wait_time(self): try: self.created_collection.create_item(body=document_definition) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, StatusCodes.TOO_MANY_REQUESTS) self.assertGreaterEqual(self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryWaitTimeInMs], connection_policy.RetryOptions.MaxWaitTimeInSeconds * 1000) @@ -162,7 +162,7 @@ def test_resource_throttle_retry_policy_query(self): { 'name':'@id', 'value':document_definition['id'] } ] })) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(e.status_code, StatusCodes.TOO_MANY_REQUESTS) self.assertEqual(connection_policy.RetryOptions.MaxRetryAttemptCount, self.created_collection.client_connection.last_response_headers[HttpHeaders.ThrottleRetryCount]) @@ -233,7 +233,7 @@ def test_default_retry_policy_for_create(self): created_document = {} try : created_document = self.created_collection.create_item(body=document_definition) - except errors.HTTPFailure as err: + except errors.CosmosHttpResponseError as err: self.assertEqual(err.status_code, 10054) self.assertDictEqual(created_document, {}) @@ -244,7 +244,12 @@ def test_default_retry_policy_for_create(self): _retry_utility.ExecuteFunction = original_execute_function def _MockExecuteFunction(self, function, *args, **kwargs): - raise errors.HTTPFailure(StatusCodes.TOO_MANY_REQUESTS, "Request rate is too large", {HttpHeaders.RetryAfterInMilliseconds: self.retry_after_in_milliseconds}) + response = test_config.FakeResponse({HttpHeaders.RetryAfterInMilliseconds: self.retry_after_in_milliseconds}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.TOO_MANY_REQUESTS, + message="Request rate is too large", + response=response) + class MockExecuteFunctionConnectionReset(object): @@ -257,7 +262,10 @@ def __call__(self, func, *args, **kwargs): if self.counter % 3 == 0: return self.org_func(func, *args, **kwargs) else: - raise errors.HTTPFailure(10054, "Connection was reset", {}) + raise errors.CosmosHttpResponseError( + status_code=10054, + message="Connection was reset", + response=test_config.FakeResponse({})) if __name__ == '__main__': diff --git a/sdk/cosmos/azure-cosmos/test/routing_map_tests.py b/sdk/cosmos/azure-cosmos/test/routing_map_tests.py index f2f396700be2..ac1fc549d175 100644 --- a/sdk/cosmos/azure-cosmos/test/routing_map_tests.py +++ b/sdk/cosmos/azure-cosmos/test/routing_map_tests.py @@ -55,7 +55,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.collection_link = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client).container_link def test_read_partition_key_ranges(self): diff --git a/sdk/cosmos/azure-cosmos/test/session_container_tests.py b/sdk/cosmos/azure-cosmos/test/session_container_tests.py index 035df68e94ac..4b2ff5513976 100644 --- a/sdk/cosmos/azure-cosmos/test/session_container_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_container_tests.py @@ -37,7 +37,7 @@ class Test_session_container(unittest.TestCase): connectionPolicy = test_config._test_config.connectionPolicy def setUp(self): - self.client = cosmos_client.CosmosClient(self.host, 
{'masterKey': self.masterkey}, "Session", connection_policy=self.connectionPolicy) + self.client = cosmos_client.CosmosClient(self.host, self.masterkey, "Session", connection_policy=self.connectionPolicy) self.session = self.client.client_connection.Session def tearDown(self): diff --git a/sdk/cosmos/azure-cosmos/test/session_tests.py b/sdk/cosmos/azure-cosmos/test/session_tests.py index f3e7e3e9fe93..c74506248f6f 100644 --- a/sdk/cosmos/azure-cosmos/test/session_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_tests.py @@ -33,7 +33,7 @@ def setUpClass(cls): "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) cls.created_collection = test_config._test_config.create_multi_partition_collection_with_custom_pk_if_not_exist(cls.client) @@ -57,7 +57,11 @@ def test_session_token_not_sent_for_master_resource_ops (self): synchronized_request._Request = self._OriginalRequest def _MockExecuteFunctionSessionReadFailureOnce(self, function, *args, **kwargs): - raise errors.HTTPFailure(StatusCodes.NOT_FOUND, "Read Session not available", {HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + response = test_config.FakeResponse({HttpHeaders.SubStatus: SubStatusCodes.READ_SESSION_NOTAVAILABLE}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.NOT_FOUND, + message="Read Session not available", + response=response) def test_clear_session_token(self): created_document = self.created_collection.create_item(body={'id': '1' + str(uuid.uuid4()), 'pk': 'mypk'}) @@ -66,7 +70,7 @@ def test_clear_session_token(self): _retry_utility.ExecuteFunction = self._MockExecuteFunctionSessionReadFailureOnce try: self.created_collection.read_item(item=created_document['id'], partition_key='mypk') - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: self.assertEqual(self.client.client_connection.session.get_session_token( 'dbs/' + self.created_db.id + '/colls/' + self.created_collection.id), "") self.assertEqual(e.status_code, StatusCodes.NOT_FOUND) @@ -84,7 +88,7 @@ def test_internal_server_error_raised_for_invalid_session_token_received_from_se try: self.created_collection.create_item(body={'id': '1' + str(uuid.uuid4()), 'pk': 'mypk'}) self.fail() - except errors.HTTPFailure as e: - self.assertEqual(e._http_error_message, "Could not parse the received session token: 2") + except errors.CosmosHttpResponseError as e: + self.assertEqual(e.http_error_message, "Could not parse the received session token: 2") self.assertEqual(e.status_code, StatusCodes.INTERNAL_SERVER_ERROR) _retry_utility.ExecuteFunction = self.OriginalExecuteFunction diff --git a/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py b/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py index 76e9482ade4f..91fec45ade57 100644 --- a/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py +++ b/sdk/cosmos/azure-cosmos/test/session_token_unit_tests.py @@ -1,7 +1,7 @@ import unittest import pytest from azure.cosmos._vector_session_token import VectorSessionToken -from azure.cosmos.errors import CosmosError +from azure.cosmos.errors import CosmosHttpResponseError pytestmark = pytest.mark.cosmosEmulator @@ -11,7 +11,7 @@ class SessionTokenUnitTest(unittest.TestCase): def 
test_validate_successful_session_token_parsing(self): #valid session token session_token = "1#100#1=20#2=5#3=30" - self.assertEquals(VectorSessionToken.create(session_token).convert_to_string(), "1#100#1=20#2=5#3=30") + self.assertEqual(VectorSessionToken.create(session_token).convert_to_string(), "1#100#1=20#2=5#3=30") def test_validate_session_token_parsing_with_invalid_version(self): session_token = "foo#100#1=20#2=5#3=30" @@ -76,5 +76,5 @@ def test_validate_session_token_comparison(self): try: session_token1.merge(session_token2) self.fail("Region progress can not be different when version is same") - except CosmosError as e: - self.assertEquals(str(e), "Status Code: 500. Compared session tokens '1#101#1=20#2=5#3=30' and '1#100#1=20#2=5#3=30#4=40' have unexpected regions.") + except CosmosHttpResponseError as e: + self.assertEqual(str(e), "Status code: 500\nCompared session tokens '1#101#1=20#2=5#3=30' and '1#100#1=20#2=5#3=30#4=40' have unexpected regions.") diff --git a/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py b/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py index 73ab656627b2..2f92dd8a92f5 100644 --- a/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py +++ b/sdk/cosmos/azure-cosmos/test/streaming_failover_test.py @@ -3,6 +3,7 @@ import pytest import azure.cosmos.documents as documents import azure.cosmos.errors as errors +import test_config from azure.cosmos.http_constants import HttpHeaders, StatusCodes, SubStatusCodes from azure.cosmos import _retry_utility from azure.cosmos import _endpoint_discovery_retry_policy @@ -83,7 +84,11 @@ def _MockExecuteFunctionEndpointDiscover(self, function, *args, **kwargs): return ({}, {}) else: self.endpoint_sequence.append(args[1].location_endpoint_to_route) - raise errors.HTTPFailure(StatusCodes.FORBIDDEN, "Request is not permitted in this region", {HttpHeaders.SubStatus: SubStatusCodes.WRITE_FORBIDDEN}) + response = test_config.FakeResponse({HttpHeaders.SubStatus: SubStatusCodes.WRITE_FORBIDDEN}) + raise errors.CosmosHttpResponseError( + status_code=StatusCodes.FORBIDDEN, + message="Request is not permitted in this region", + response=response) def test_retry_policy_does_not_mark_null_locations_unavailable(self): self.original_get_database_account = cosmos_client_connection.CosmosClientConnection.GetDatabaseAccount @@ -107,7 +112,8 @@ def test_retry_policy_does_not_mark_null_locations_unavailable(self): self._write_counter = 0 request = RequestObject(http_constants.ResourceType.Document, documents._OperationType.Read) endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request) - endpointDiscovery_retry_policy.ShouldRetry(errors.HTTPFailure(http_constants.StatusCodes.FORBIDDEN)) + endpointDiscovery_retry_policy.ShouldRetry(errors.CosmosHttpResponseError( + status_code=http_constants.StatusCodes.FORBIDDEN)) self.assertEqual(self._read_counter, 0) self.assertEqual(self._write_counter, 0) @@ -115,7 +121,8 @@ def test_retry_policy_does_not_mark_null_locations_unavailable(self): self._write_counter = 0 request = RequestObject(http_constants.ResourceType.Document, documents._OperationType.Create) endpointDiscovery_retry_policy = _endpoint_discovery_retry_policy.EndpointDiscoveryRetryPolicy(documents.ConnectionPolicy(), endpoint_manager, request) - endpointDiscovery_retry_policy.ShouldRetry(errors.HTTPFailure(http_constants.StatusCodes.FORBIDDEN)) + endpointDiscovery_retry_policy.ShouldRetry(errors.CosmosHttpResponseError( + 
status_code=http_constants.StatusCodes.FORBIDDEN)) self.assertEqual(self._read_counter, 0) self.assertEqual(self._write_counter, 0) diff --git a/sdk/cosmos/azure-cosmos/test/test_config.py b/sdk/cosmos/azure-cosmos/test/test_config.py index cb3d053b524a..82dcafff145a 100644 --- a/sdk/cosmos/azure-cosmos/test/test_config.py +++ b/sdk/cosmos/azure-cosmos/test/test_config.py @@ -25,7 +25,7 @@ import azure.cosmos.documents as documents import azure.cosmos.errors as errors from azure.cosmos.http_constants import StatusCodes -from azure.cosmos.database import Database +from azure.cosmos.database import DatabaseProxy from azure.cosmos.cosmos_client import CosmosClient from azure.cosmos.partition_key import PartitionKey from azure.cosmos.partition_key import NonePartitionKeyValue @@ -40,6 +40,7 @@ class _test_config(object): #[SuppressMessage("Microsoft.Security", "CS002:SecretInNextLine", Justification="Cosmos DB Emulator Key")] masterKey = os.getenv('ACCOUNT_KEY', 'C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==') host = os.getenv('ACCOUNT_HOST', 'https://localhost:443/') + connection_str = os.getenv('ACCOUNT_CONNECTION_STR', 'AccountEndpoint={};AccountKey={};'.format(host, masterKey)) connectionPolicy = documents.ConnectionPolicy() connectionPolicy.DisableSSLVerification = True @@ -86,7 +87,7 @@ def try_delete_database(cls, client): # type: (CosmosClient) -> None try: client.delete_database(cls.TEST_DATABASE_ID) - except errors.HTTPFailure as e: + except errors.CosmosHttpResponseError as e: if e.status_code != StatusCodes.NOT_FOUND: raise e @@ -161,5 +162,12 @@ def remove_all_documents(cls, document_collection, use_custom_partition_key): # sleep to ensure deletes are propagated for multimaster enabled accounts time.sleep(2) break - except errors.HTTPFailure as e: - print("Error occurred while deleting documents:" + str(e) + " \nRetrying...") \ No newline at end of file + except errors.CosmosHttpResponseError as e: + print("Error occurred while deleting documents:" + str(e) + " \nRetrying...") + + +class FakeResponse: + def __init__(self, headers): + self.headers = headers + self.reason = "foo" + self.status_code = "bar" diff --git a/sdk/cosmos/azure-cosmos/test/ttl_tests.py b/sdk/cosmos/azure-cosmos/test/ttl_tests.py index 9be94249f36a..d14a189bf08b 100644 --- a/sdk/cosmos/azure-cosmos/test/ttl_tests.py +++ b/sdk/cosmos/azure-cosmos/test/ttl_tests.py @@ -60,7 +60,7 @@ def __AssertHTTPFailureWithStatus(self, status_code, func, *args, **kwargs): try: func(*args, **kwargs) self.assertFalse(True, 'function should fail.') - except errors.HTTPFailure as inst: + except errors.CosmosHttpResponseError as inst: self.assertEqual(inst.status_code, status_code) @classmethod @@ -71,7 +71,7 @@ def setUpClass(cls): "You must specify your Azure Cosmos account values for " "'masterKey' and 'host' at the top of this class to run the " "tests.") - cls.client = cosmos_client.CosmosClient(cls.host, {'masterKey': cls.masterKey}, connection_policy=cls.connectionPolicy) + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, connection_policy=cls.connectionPolicy) cls.created_db = test_config._test_config.create_database_if_not_exist(cls.client) def test_collection_and_document_ttl_values(self): diff --git a/sdk/cosmos/azure-cosmos/test/utils_tests.py b/sdk/cosmos/azure-cosmos/test/utils_tests.py index 4349502fe854..589878fee7b2 100644 --- a/sdk/cosmos/azure-cosmos/test/utils_tests.py +++ b/sdk/cosmos/azure-cosmos/test/utils_tests.py @@ -21,9 +21,11 @@ import unittest 
import pytest +import azure.cosmos import azure.cosmos._utils as _utils import platform import azure.cosmos.http_constants as http_constants +import test_config pytestmark = pytest.mark.cosmosEmulator @@ -35,12 +37,20 @@ class UtilsTests(unittest.TestCase): def test_user_agent(self): user_agent = _utils.get_user_agent() - expected_user_agent = "{}/{} Python/{} azure-cosmos/{}".format( - platform.system(), platform.release(), platform.python_version(), - http_constants.Versions.SDKVersion + expected_user_agent = "azsdk-python-cosmos/{} Python/{} ({})".format( + azure.cosmos.__version__, + platform.python_version(), + platform.platform() ) - self.assertEqual(user_agent, expected_user_agent) + + def test_connection_string(self): + client = azure.cosmos.CosmosClient.from_connection_string(test_config._test_config.connection_str) + databases = list(client.list_databases()) + assert len(databases) > 0 + assert isinstance(databases[0], dict) + assert databases[0].get('_etag') is not None + if __name__ == "__main__": unittest.main()
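The constructor changes running through these tests follow one pattern: the master key is passed directly as `credential` rather than wrapped in an auth dictionary, and `connection_policy` becomes a keyword argument. Below is a minimal sketch of that usage plus the `from_connection_string` path that `utils_tests.py` now covers; the environment variable names mirror `test_config.py`, while the endpoint and key values are placeholders, not values from this patch.

```Python
import os
from azure.cosmos import CosmosClient

# ACCOUNT_HOST / ACCOUNT_KEY mirror the defaults in test_config.py; the fallback
# values here are placeholders for a real Cosmos DB account.
url = os.environ.get("ACCOUNT_HOST", "https://localhost:443/")
key = os.environ.get("ACCOUNT_KEY", "<account-key>")

# The key is passed directly as `credential` instead of {'masterKey': key}.
client = CosmosClient(url, credential=key)

# Equivalent client built from a connection string, as test_connection_string does.
conn_str = "AccountEndpoint={};AccountKey={};".format(url, key)
client_from_conn_str = CosmosClient.from_connection_string(conn_str)
print(list(client_from_conn_str.list_databases()))
```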
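Where the tests previously caught `HTTPFailure`, they now catch `CosmosHttpResponseError`, which still exposes `status_code` and `sub_status`. A hedged catch-side sketch of that pattern follows; the database, container, and item names are placeholders.

```Python
from azure.cosmos import CosmosClient, errors
from azure.cosmos.http_constants import StatusCodes

client = CosmosClient("https://localhost:443/", credential="<account-key>")
# "mydb", "mycoll", and the item id below are placeholders, not values from this patch.
container = client.get_database_client("mydb").get_container_client("mycoll")

try:
    container.read_item(item="no-such-id", partition_key="mypk")
except errors.CosmosHttpResponseError as e:
    # status_code and sub_status carry the information the old HTTPFailure
    # constructor took as positional status / sub-status arguments.
    if e.status_code == StatusCodes.NOT_FOUND:
        print("item not found, sub-status:", e.sub_status)
    else:
        raise
```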
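Finally, the `fetch_next_block` loops in `orderby_tests.py` and `query_tests.py` are replaced by `by_page()` on the `ItemPaged` results. A short sketch of the new pagination loop, under the same placeholder account, database, and container names:

```Python
from azure.cosmos import CosmosClient

client = CosmosClient("https://localhost:443/", credential="<account-key>")
container = client.get_database_client("mydb").get_container_client("mycoll")

# query_items now returns an azure.core.paging.ItemPaged instance.
results = container.query_items(
    query="SELECT * FROM c",
    enable_cross_partition_query=True,
    max_item_count=10,
)

# Iterate page by page instead of calling fetch_next_block() in a while loop.
for page in results.by_page():
    items = list(page)
    print("fetched", len(items), "items")

# Calling next() on an exhausted page iterator raises StopIteration rather than
# returning an empty list, which is what the updated tests assert.
```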