diff --git a/sdk/cosmos/azure-cosmos/CHANGELOG.md b/sdk/cosmos/azure-cosmos/CHANGELOG.md index 0fb717f6ce83..3cb2bb888086 100644 --- a/sdk/cosmos/azure-cosmos/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos/CHANGELOG.md @@ -1,10 +1,11 @@ ## Release History -### 4.3.1 (2022-08-12) +### 4.3.1 (Unreleased) #### Features Added - GA release of integrated cache functionality. For more information on integrated cache please see [Azure Cosmos DB integrated cache](https://docs.microsoft.com/azure/cosmos-db/integrated-cache). - Added ability to replace analytical ttl on containers. For more information on analytical ttl please see [Azure Cosmos DB analytical store](https://docs.microsoft.com/azure/cosmos-db/analytical-store-introduction). +- Added the ability to create containers and databases with autoscale properties for the sync and async clients. #### Bugs Fixed - Fixed parsing of args for overloaded `container.read()` method. diff --git a/sdk/cosmos/azure-cosmos/README.md b/sdk/cosmos/azure-cosmos/README.md index 4a7bd58df4f7..2548cda63108 100644 --- a/sdk/cosmos/azure-cosmos/README.md +++ b/sdk/cosmos/azure-cosmos/README.md @@ -79,7 +79,7 @@ client = CosmosClient(URL, credential=KEY) ### AAD Authentication You can also authenticate a client utilizing your service principal's AAD credentials and the azure identity package. -You can directly pass in the credentials information to ClientSecretCrednetial, or use the DefaultAzureCredential: +You can directly pass in the credentials information to ClientSecretCredential, or use the DefaultAzureCredential: ```Python from azure.cosmos import CosmosClient from azure.identity import ClientSecretCredential, DefaultAzureCredential @@ -162,7 +162,6 @@ Currently the features below are **not supported**. 
For alternatives options, ch * Get CollectionSizeUsage, DatabaseUsage, and DocumentUsage metrics * Create Geospatial Index -* Provision Autoscale DBs or containers * Update Autoscale throughput * Get the connection string * Get the minimum RU/s of a container diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py index 406418f1e0cc..6565ebed8c89 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/__init__.py @@ -27,6 +27,7 @@ from .user import UserProxy from .scripts import ScriptsProxy from .offer import Offer +from .offer import ThroughputProperties from .documents import ( ConsistencyLevel, DataType, @@ -62,5 +63,6 @@ "TriggerOperation", "TriggerType", "ConnectionRetryPolicy", + "ThroughputProperties", ) __version__ = VERSION diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py index fd45b92bf660..ebcf8cf60e03 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/_base.py @@ -27,18 +27,17 @@ import json import uuid import binascii -from typing import Dict, Any +from typing import Dict, Any, Union from urllib.parse import quote as urllib_quote from urllib.parse import urlsplit - from azure.core import MatchConditions - from . import auth from . import documents from . import partition_key from . import http_constants from . 
def _stringify_auto_scale(offer: "ThroughputProperties") -> str:
    """Serialize the autoscale settings of a ThroughputProperties into the JSON
    string the service expects for autoscale offers.

    :param offer: Throughput properties carrying ``auto_scale_max_throughput``
        and, optionally, ``auto_scale_increment_percent``.
    :returns: JSON string of the autoscale settings payload.
    """
    auto_scale_params = {"maxThroughput": offer.auto_scale_max_throughput}
    increment_percent = offer.auto_scale_increment_percent
    if increment_percent is not None:
        auto_scale_params["autoUpgradePolicy"] = {"throughputPolicy": {"incrementPercent": increment_percent}}
    return json.dumps(auto_scale_params)


def _set_throughput_options(offer: Union[int, "ThroughputProperties", None],
                            request_options: Dict[str, Any]) -> None:
    """Translate a user-supplied throughput value into request options.

    Accepts either a plain integer (manual throughput) or a ThroughputProperties
    instance (manual and/or autoscale throughput). Mutates ``request_options``
    in place; a ``None`` offer leaves it untouched.

    :raises ValueError: if ``auto_scale_increment_percent`` is set without
        ``auto_scale_max_throughput``.
    :raises TypeError: if ``offer`` is neither an int nor ThroughputProperties-like.
    """
    if offer is None:
        return
    try:
        max_throughput = offer.auto_scale_max_throughput
        increment_percent = offer.auto_scale_increment_percent

        if max_throughput is not None:
            # The autoscale settings travel as a JSON string; GetHeaders maps
            # this option key onto the autoscale request header.
            request_options['autoUpgradePolicy'] = _stringify_auto_scale(offer=offer)
        elif increment_percent:
            raise ValueError("auto_scale_max_throughput must be supplied in "
                             "conjunction with auto_scale_increment_percent")
        if offer.offer_throughput:
            request_options["offerThroughput"] = offer.offer_throughput

    except AttributeError:
        # EAFP fallback: a plain integer has none of the attributes above.
        if isinstance(offer, int):
            request_options["offerThroughput"] = offer
        else:
            # Suppress the incidental AttributeError context; the real problem
            # is the caller's argument type.
            raise TypeError("offer_throughput must be int or an instance of "
                            "ThroughputProperties") from None


def _deserialize_throughput(throughput: list) -> "ThroughputProperties":
    """Build a ThroughputProperties from the raw offer list returned by the service.

    Pieces that are absent (e.g. no autoscale settings on a manual-throughput
    offer, or no offerThroughput on an autoscale offer) deserialize to None.

    :param throughput: Offer query result; the first element is the offer resource.
    :returns: The deserialized throughput properties.
    """
    properties = throughput[0]

    def _dig(data: Any, *keys: str) -> Any:
        # Walk nested dicts, returning None when any level is missing or None.
        for key in keys:
            try:
                data = data[key]
            except (KeyError, TypeError):
                return None
        return data

    return ThroughputProperties(
        auto_scale_max_throughput=_dig(properties, 'content', 'offerAutopilotSettings', 'maxThroughput'),
        auto_scale_increment_percent=_dig(
            properties, 'content', 'offerAutopilotSettings', 'autoUpgradePolicy',
            'throughputPolicy', 'incrementPercent'),
        offer_throughput=_dig(properties, 'content', 'offerThroughput'),
        properties=properties
    )
properties=throughput_properties[0]) + return _deserialize_throughput(throughput=throughput_properties) @distributed_trace_async async def replace_throughput(self, throughput: int, **kwargs: Any) -> ThroughputProperties: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py index 9e9707c44356..d451b6df284d 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_cosmos_client.py @@ -31,7 +31,8 @@ from ..cosmos_client import _parse_connection_str, _build_auth from ._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options as _build_options +from .._base import build_options as _build_options, _set_throughput_options +from ..offer import ThroughputProperties from ._retry_utility_async import _ConnectionRetryPolicy from ._database import DatabaseProxy from ..documents import ConnectionPolicy, DatabaseAccount @@ -196,7 +197,8 @@ async def create_database( # pylint: disable=redefined-builtin Create a new database with the given ID (name). :param str id: ID (name) of the database to create. - :keyword int offer_throughput: The provisioned throughput for this offer. + :keyword offer_throughput: The provisioned throughput for this offer. + :paramtype offer_throughput: int or ~azure.cosmos.ThroughputProperties. :keyword str session_token: Token for use with Session consistency. :keyword dict[str, str] initial_headers: Initial headers to be sent as part of the request. :keyword str etag: An ETag value, or the wildcard character (*). 
Used to check if the resource @@ -223,8 +225,7 @@ async def create_database( # pylint: disable=redefined-builtin request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) offer_throughput = kwargs.pop('offer_throughput', None) - if offer_throughput is not None: - request_options["offerThroughput"] = offer_throughput + _set_throughput_options(offer=offer_throughput, request_options=request_options) result = await self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) if response_hook: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_database.py index 3a2385703511..f4eec17cf79b 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/aio/_database.py @@ -30,7 +30,7 @@ from azure.core.tracing.decorator import distributed_trace from ._cosmos_client_connection_async import CosmosClientConnection -from .._base import build_options as _build_options +from .._base import build_options as _build_options, _set_throughput_options, _deserialize_throughput from ._container import ContainerProxy from ..offer import ThroughputProperties from ..http_constants import StatusCodes @@ -164,7 +164,8 @@ async def create_container( :keyword dict[str, str] indexing_policy: The indexing policy to apply to the container. :keyword int default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. - :keyword int offer_throughput: The provisioned throughput for this offer. + :keyword offer_throughput: The provisioned throughput for this offer. + :paramtype offer_throughput: int or ~azure.cosmos.ThroughputProperties. :keyword dict[str, str] unique_key_policy: The unique key policy to apply to the container. :keyword dict[str, str] conflict_resolution_policy: The conflict resolution policy to apply to the container. 
:keyword str session_token: Token for use with Session consistency. @@ -227,8 +228,7 @@ async def create_container( request_options = _build_options(kwargs) response_hook = kwargs.pop('response_hook', None) offer_throughput = kwargs.pop('offer_throughput', None) - if offer_throughput is not None: - request_options["offerThroughput"] = offer_throughput + _set_throughput_options(offer=offer_throughput, request_options=request_options) data = await self.client_connection.CreateContainer( database_link=self.database_link, collection=definition, options=request_options, **kwargs @@ -258,7 +258,8 @@ async def create_container_if_not_exists( :keyword dict[str, str] indexing_policy: The indexing policy to apply to the container. :keyword int default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. - :keyword int offer_throughput: The provisioned throughput for this offer. + :keyword offer_throughput: The provisioned throughput for this offer. + :paramtype offer_throughput: int or ~azure.cosmos.ThroughputProperties. :keyword dict[str, str] unique_key_policy: The unique key policy to apply to the container. :keyword dict[str, str] conflict_resolution_policy: The conflict resolution policy to apply to the container. :keyword str session_token: Token for use with Session consistency. 
@@ -749,9 +750,7 @@ async def get_throughput(self, **kwargs: Any) -> ThroughputProperties: if response_hook: response_hook(self.client_connection.last_response_headers, throughput_properties) - return ThroughputProperties(offer_throughput=throughput_properties[0]["content"]["offerThroughput"], - properties=throughput_properties[0]) - + return _deserialize_throughput(throughput=throughput_properties) @distributed_trace_async async def replace_throughput(self, throughput: int, **kwargs: Any) -> ThroughputProperties: diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py index a02b2bb5c419..721ed3ea2390 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/container.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/container.py @@ -29,7 +29,7 @@ from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from ._base import build_options, validate_cache_staleness_value +from ._base import build_options, validate_cache_staleness_value, _deserialize_throughput from .exceptions import CosmosResourceNotFoundError from .http_constants import StatusCodes from .offer import ThroughputProperties @@ -665,8 +665,7 @@ def get_throughput(self, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, throughput_properties) - return ThroughputProperties(offer_throughput=throughput_properties[0]["content"]["offerThroughput"], - properties=throughput_properties[0]) + return _deserialize_throughput(throughput=throughput_properties) @distributed_trace def replace_throughput(self, throughput, **kwargs): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py index 8110afbc787d..3e62dd2b9001 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/cosmos_client.py @@ -27,7 +27,8 @@ from 
azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from ._base import build_options +from ._base import build_options, _set_throughput_options +from .offer import ThroughputProperties from ._retry_utility import ConnectionRetryPolicy from .database import DatabaseProxy from .documents import ConnectionPolicy, DatabaseAccount @@ -230,7 +231,7 @@ def create_database( # pylint: disable=redefined-builtin self, id, # type: str populate_query_metrics=None, # type: Optional[bool] - offer_throughput=None, # type: Optional[int] + offer_throughput=None, # type: Optional[Union[int, ThroughputProperties]] **kwargs # type: Any ): # type: (...) -> DatabaseProxy @@ -238,7 +239,8 @@ def create_database( # pylint: disable=redefined-builtin Create a new database with the given ID (name). :param id: ID (name) of the database to create. - :param int offer_throughput: The provisioned throughput for this offer. + :param offer_throughput: The provisioned throughput for this offer. + :paramtype offer_throughput: int or ~azure.cosmos.ThroughputProperties. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword str etag: An ETag value, or the wildcard character (*). 
Used to check if the resource @@ -268,8 +270,7 @@ def create_database( # pylint: disable=redefined-builtin UserWarning, ) request_options["populateQueryMetrics"] = populate_query_metrics - if offer_throughput is not None: - request_options["offerThroughput"] = offer_throughput + _set_throughput_options(offer=offer_throughput, request_options=request_options) result = self.client_connection.CreateDatabase(database=dict(id=id), options=request_options, **kwargs) if response_hook: @@ -281,7 +282,7 @@ def create_database_if_not_exists( # pylint: disable=redefined-builtin self, id, # type: str populate_query_metrics=None, # type: Optional[bool] - offer_throughput=None, # type: Optional[int] + offer_throughput=None, # type: Optional[Union[int, ThroughputProperties]] **kwargs # type: Any ): # type: (...) -> DatabaseProxy @@ -296,7 +297,8 @@ def create_database_if_not_exists( # pylint: disable=redefined-builtin :param id: ID (name) of the database to read or create. :param bool populate_query_metrics: Enable returning query metrics in response headers. - :param int offer_throughput: The provisioned throughput for this offer. + :param offer_throughput: The provisioned throughput for this offer. + :type offer_throughput: int or ~azure.cosmos.ThroughputProperties. :keyword str session_token: Token for use with Session consistency. :keyword dict[str,str] initial_headers: Initial headers to be sent as part of the request. :keyword str etag: An ETag value, or the wildcard character (*). 
Used to check if the resource diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py index bc3935d99c22..1152cea0f823 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/database.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/database.py @@ -28,7 +28,7 @@ from azure.core.tracing.decorator import distributed_trace # type: ignore from ._cosmos_client_connection import CosmosClientConnection -from ._base import build_options +from ._base import build_options, _set_throughput_options, _deserialize_throughput from .container import ContainerProxy from .offer import ThroughputProperties from .http_constants import StatusCodes @@ -155,7 +155,7 @@ def create_container( indexing_policy=None, # type: Optional[Dict[str, Any]] default_ttl=None, # type: Optional[int] populate_query_metrics=None, # type: Optional[bool] - offer_throughput=None, # type: Optional[int] + offer_throughput=None, # type: Optional[Union[int, ThroughputProperties]] unique_key_policy=None, # type: Optional[Dict[str, Any]] conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] **kwargs # type: Any @@ -170,6 +170,7 @@ def create_container( :param indexing_policy: The indexing policy to apply to the container. :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. :param offer_throughput: The provisioned throughput for this offer. + :type offer_throughput: int or ~azure.cosmos.ThroughputProperties. :param unique_key_policy: The unique key policy to apply to the container. :param conflict_resolution_policy: The conflict resolution policy to apply to the container. :keyword str session_token: Token for use with Session consistency. 
@@ -232,9 +233,7 @@ def create_container( UserWarning, ) request_options["populateQueryMetrics"] = populate_query_metrics - if offer_throughput is not None: - request_options["offerThroughput"] = offer_throughput - + _set_throughput_options(offer=offer_throughput, request_options=request_options) data = self.client_connection.CreateContainer( database_link=self.database_link, collection=definition, options=request_options, **kwargs ) @@ -252,7 +251,7 @@ def create_container_if_not_exists( indexing_policy=None, # type: Optional[Dict[str, Any]] default_ttl=None, # type: Optional[int] populate_query_metrics=None, # type: Optional[bool] - offer_throughput=None, # type: Optional[int] + offer_throughput=None, # type: Optional[Union[int, ThroughputProperties]] unique_key_policy=None, # type: Optional[Dict[str, Any]] conflict_resolution_policy=None, # type: Optional[Dict[str, Any]] **kwargs # type: Any @@ -270,6 +269,7 @@ def create_container_if_not_exists( :param default_ttl: Default time to live (TTL) for items in the container. If unspecified, items do not expire. :param populate_query_metrics: Enable returning query metrics in response headers. :param offer_throughput: The provisioned throughput for this offer. + :paramtype offer_throughput: int or ~azure.cosmos.ThroughputProperties. :param unique_key_policy: The unique key policy to apply to the container. :param conflict_resolution_policy: The conflict resolution policy to apply to the container. :keyword str session_token: Token for use with Session consistency. 
@@ -774,8 +774,7 @@ def get_throughput(self, **kwargs): if response_hook: response_hook(self.client_connection.last_response_headers, throughput_properties) - return ThroughputProperties(offer_throughput=throughput_properties[0]["content"]["offerThroughput"], - properties=throughput_properties[0]) + return _deserialize_throughput(throughput=throughput_properties) @distributed_trace def replace_throughput(self, throughput, **kwargs): diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py index f977b819a97f..5fc25c33188a 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/http_constants.py @@ -166,6 +166,7 @@ class HttpHeaders(object): # Offer type. OfferType = "x-ms-offer-type" OfferThroughput = "x-ms-offer-throughput" + AutoscaleSettings = "x-ms-cosmos-offer-autopilot-settings" # Custom RUs/minute headers DisableRUPerMinuteUsage = "x-ms-documentdb-disable-ru-per-minute-usage" diff --git a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py index 047c61948603..b5cdd49b7df2 100644 --- a/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py +++ b/sdk/cosmos/azure-cosmos/azure/cosmos/offer.py @@ -24,13 +24,25 @@ class ThroughputProperties(object): """Represents the throughput properties in an Azure Cosmos DB SQL API container. - To read and update throughput properties use the associated methods on the :class:`Container`. + + To read and update throughput properties, use the associated methods on the :class:`Container`. + If configuring auto-scale, `auto_scale_max_throughput` needs to be set and + `auto_scale_increment_percent` can also be set in conjunction with it. + The value of `offer_throughput` will not be allowed to be set in conjunction with the auto-scale settings. + + :keyword int offer_throughput: The provisioned throughput in request units per second as a number. 
+ :keyword int auto_scale_max_throughput: The max auto-scale throughput. It should have a valid throughput + value between 1000 and 1000000 inclusive, in increments of 1000. + :keyword int auto_scale_increment_percent: is the % from the base selected RU it increases at a given time, + the increment percent should be greater than or equal to zero. """ - def __init__(self, offer_throughput, properties=None): # pylint: disable=super-init-not-called - # type: (int, Dict[str, Any]) -> None - self.offer_throughput = offer_throughput - self.properties = properties + def __init__(self, *args, **kwargs): # pylint: disable=super-init-not-called + self.offer_throughput = args[0] if args else kwargs.get('offer_throughput') + self.properties = args[1] if len(args) > 1 else kwargs.get('properties') + self.auto_scale_max_throughput = kwargs.get('auto_scale_max_throughput') + self.auto_scale_increment_percent = kwargs.get('auto_scale_increment_percent') + Offer = ThroughputProperties diff --git a/sdk/cosmos/azure-cosmos/samples/container_management.py b/sdk/cosmos/azure-cosmos/samples/container_management.py index b3c55434eb48..0c3dd7fbba07 100644 --- a/sdk/cosmos/azure-cosmos/samples/container_management.py +++ b/sdk/cosmos/azure-cosmos/samples/container_management.py @@ -181,6 +181,19 @@ def create_container(db, id): except exceptions.CosmosResourceExistsError: print('A container with id \'_container_analytical_store\' already exists') + print("\n2.8 Create Container - With auto scale settings") + + try: + container = db.create_container( + id=id+"_container_auto_scale_settings", + partition_key=partition_key, + offer_throughput=ThroughputProperties(auto_scale_max_throughput=5000, auto_scale_increment_percent=0) + ) + print('Container with id \'{0}\' created'.format(container.id)) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(coll['id'])) + def manage_provisioned_throughput(db, id): diff --git 
a/sdk/cosmos/azure-cosmos/samples/container_management_async.py b/sdk/cosmos/azure-cosmos/samples/container_management_async.py index 959bb2ae805b..3dabe2aa6ea6 100644 --- a/sdk/cosmos/azure-cosmos/samples/container_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/container_management_async.py @@ -200,6 +200,19 @@ async def create_container(db, id): except Exception: print('Creating container with analytical storage can only happen in synapse link activated accounts, skipping step') + print("\n2.8 Create Container - With auto scale settings") + + try: + container = await db.create_container( + id=id+"_container_auto_scale_settings", + partition_key=partition_key, + offer_throughput=ThroughputProperties(auto_scale_max_throughput=5000, auto_scale_increment_percent=0) + ) + print('Container with id \'{0}\' created'.format(container.id)) + + except exceptions.CosmosResourceExistsError: + print('A container with id \'{0}\' already exists'.format(coll['id'])) + async def manage_provisioned_throughput(db, id): diff --git a/sdk/cosmos/azure-cosmos/samples/database_management.py b/sdk/cosmos/azure-cosmos/samples/database_management.py index 5e747c5d2485..2ff9d56e3a59 100644 --- a/sdk/cosmos/azure-cosmos/samples/database_management.py +++ b/sdk/cosmos/azure-cosmos/samples/database_management.py @@ -60,6 +60,17 @@ def create_database(client, id): except exceptions.CosmosResourceExistsError: print('A database with id \'{0}\' already exists'.format(id)) + print("\n2.8 Create Database - With auto scale settings") + + try: + client.create_database( + id=id, + offer_throughput=ThroughputProperties(auto_scale_max_throughput=5000, auto_scale_increment_percent=0)) + print('Database with id \'{0}\' created'.format(id)) + + except exceptions.CosmosResourceExistsError: + print('A database with id \'{0}\' already exists'.format(id)) + def read_database(client, id): print("\n3. 
Get a Database by id") diff --git a/sdk/cosmos/azure-cosmos/samples/database_management_async.py b/sdk/cosmos/azure-cosmos/samples/database_management_async.py index fbb4aa910167..bb420336a318 100644 --- a/sdk/cosmos/azure-cosmos/samples/database_management_async.py +++ b/sdk/cosmos/azure-cosmos/samples/database_management_async.py @@ -72,6 +72,17 @@ async def create_database(client, id): except exceptions.CosmosResourceExistsError: print('A database with id \'{0}\' already exists'.format(id)) + print("\n2.8 Create Database - With auto scale settings") + + try: + await client.create_database( + id=id, + offer_throughput=ThroughputProperties(auto_scale_max_throughput=5000, auto_scale_increment_percent=0)) + print('Database with id \'{0}\' created'.format(id)) + + except exceptions.CosmosResourceExistsError: + print('A database with id \'{0}\' already exists'.format(id)) + # Alternatively, you can also use the create_database_if_not_exists method to avoid using a try catch # This method attempts to read the database first, and based on the result either creates or returns # the existing database. 
Due to the additional overhead from attempting a read, it is recommended diff --git a/sdk/cosmos/azure-cosmos/test/test_auto_scale.py b/sdk/cosmos/azure-cosmos/test/test_auto_scale.py new file mode 100644 index 000000000000..0fde52478654 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/test/test_auto_scale.py @@ -0,0 +1,119 @@ +# The MIT License (MIT) +# Copyright (c) 2022 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+import unittest +import azure.cosmos.cosmos_client as cosmos_client +import azure.cosmos.exceptions as exceptions +from azure.cosmos import ThroughputProperties, PartitionKey, http_constants +import pytest +import test_config + +pytestmark = pytest.mark.cosmosEmulator + + +@pytest.mark.usefixtures("teardown") +class AutoScaleTest(unittest.TestCase): + host = test_config._test_config.host + masterKey = test_config._test_config.masterKey + connectionPolicy = test_config._test_config.connectionPolicy + + @classmethod + def setUpClass(cls): + if (cls.masterKey == '[YOUR_KEY_HERE]' or + cls.host == '[YOUR_ENDPOINT_HERE]'): + raise Exception( + "You must specify your Azure Cosmos account values for " + "'masterKey' and 'host' at the top of this class to run the " + "tests.") + + cls.client = cosmos_client.CosmosClient(cls.host, cls.masterKey, consistency_level="Session", + connection_policy=cls.connectionPolicy) + cls.created_database = cls.client.create_database(test_config._test_config.TEST_DATABASE_ID) + + def test_auto_scale(self): + created_container = self.created_database.create_container( + id='container_with_auto_scale_settings', + partition_key=PartitionKey(path="/id"), + offer_throughput=ThroughputProperties(auto_scale_max_throughput=7000, auto_scale_increment_percent=0) + + ) + created_container_properties = created_container.get_throughput() + # Testing the input value of the max_throughput + self.assertEqual( + created_container_properties.auto_scale_max_throughput, 7000) + self.assertEqual(created_container_properties.auto_scale_increment_percent, 0) + self.assertEqual(created_container_properties.offer_throughput, None) + + self.created_database.delete_container(created_container) + + # Testing the incorrect passing of an input value of the max_throughput to verify negative behavior + with pytest.raises(exceptions.CosmosHttpResponseError) as e: + created_container = self.created_database.create_container( + id='container_with_wrong_auto_scale_settings', 
+ partition_key=PartitionKey(path="/id"), + offer_throughput=ThroughputProperties(auto_scale_max_throughput=-200, auto_scale_increment_percent=0)) + assert "Requested throughput -200 is less than required minimum throughput 1000" in str(e.value) + + def test_create_container_if_not_exist(self): + # Testing auto_scale_settings for the create_container_if_not_exists method + created_container = self.created_database.create_container_if_not_exists( + id='container_with_auto_scale_settings', + partition_key=PartitionKey(path="/id"), + offer_throughput=ThroughputProperties(auto_scale_max_throughput=1000, auto_scale_increment_percent=3) + ) + created_container_properties = created_container.get_throughput() + # Testing the incorrect input value of the max_throughput + self.assertNotEqual( + created_container_properties.auto_scale_max_throughput, 2000) + # Testing the input value of the increment_percentage + self.assertEqual( + created_container_properties.auto_scale_increment_percent, 3) + + self.client.delete_database(test_config._test_config.TEST_DATABASE_ID) + + def test_create_database(self): + # Testing auto_scale_settings for the create_database method + created_database = self.client.create_database("db1", offer_throughput=ThroughputProperties( + auto_scale_max_throughput=5000, + auto_scale_increment_percent=0)) + created_db_properties = created_database.get_throughput() + # Testing the input value of the max_throughput + self.assertEqual( + created_db_properties.auto_scale_max_throughput, 5000) + # Testing the input value of the increment_percentage + self.assertEqual( + created_db_properties.auto_scale_increment_percent, 0) + + self.client.delete_database("db1") + + def test_create_database_if_not_exists(self): + # Testing auto_scale_settings for the create_database_if_not_exists method + created_database = self.client.create_database_if_not_exists("db2", offer_throughput=ThroughputProperties( + auto_scale_max_throughput=9000, + 
auto_scale_increment_percent=11)) + created_db_properties = created_database.get_throughput() + # Testing the input value of the max_throughput + self.assertNotEqual( + created_db_properties.auto_scale_max_throughput, 8000) + # Testing the input value of the increment_percentage + self.assertEqual( + created_db_properties.auto_scale_increment_percent, 11) + + self.client.delete_database("db2") diff --git a/sdk/cosmos/azure-cosmos/test/test_auto_scale_async.py b/sdk/cosmos/azure-cosmos/test/test_auto_scale_async.py new file mode 100644 index 000000000000..717445d47d24 --- /dev/null +++ b/sdk/cosmos/azure-cosmos/test/test_auto_scale_async.py @@ -0,0 +1,130 @@ +# The MIT License (MIT) +# Copyright (c) 2022 Microsoft Corporation + +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: + +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. + +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
import unittest
from azure.cosmos.aio import CosmosClient
import azure.cosmos.exceptions as exceptions
from azure.cosmos import ThroughputProperties, PartitionKey, http_constants
import pytest
import test_config

pytestmark = pytest.mark.cosmosEmulator


@pytest.mark.usefixtures("teardown")
class AutoScaleTest(unittest.IsolatedAsyncioTestCase):
    """Async-client counterpart of the autoscale provisioning tests.

    BUGFIX: the original subclassed plain ``unittest.TestCase`` with
    ``async def`` test methods and an ``async def setUpClass`` classmethod.
    unittest never awaits either, so the client/database were never created
    and every test body silently never ran (only coroutine objects were
    produced). ``IsolatedAsyncioTestCase`` with ``asyncSetUp``/``asyncTearDown``
    actually awaits the coroutines, and the aio client is now closed so its
    underlying HTTP session is not leaked between tests.
    """

    host = test_config._test_config.host
    masterKey = test_config._test_config.masterKey
    connectionPolicy = test_config._test_config.connectionPolicy

    async def asyncSetUp(self):
        # Fail fast with a clear message when the account settings were not
        # filled in, instead of failing obscurely on the first request.
        if (self.masterKey == '[YOUR_KEY_HERE]' or
                self.host == '[YOUR_ENDPOINT_HERE]'):
            raise Exception(
                "You must specify your Azure Cosmos account values for "
                "'masterKey' and 'host' at the top of this class to run the "
                "tests.")

        self.client = CosmosClient(self.host, self.masterKey, consistency_level="Session",
                                   connection_policy=self.connectionPolicy)
        # if-not-exists because asyncSetUp runs once per test (unlike the
        # original per-class setUpClass), so the database may already exist.
        self.created_database = await self.client.create_database_if_not_exists(
            test_config._test_config.TEST_DATABASE_ID)

    async def asyncTearDown(self):
        # Close the async transport so no client session is leaked.
        await self.client.close()

    async def test_auto_scale(self):
        """Container created with autoscale settings reports them back unchanged."""
        created_container = await self.created_database.create_container(
            id='container_with_auto_scale_settings',
            partition_key=PartitionKey(path="/id"),
            offer_throughput=ThroughputProperties(auto_scale_max_throughput=5000,
                                                  auto_scale_increment_percent=0)
        )
        created_container_properties = await created_container.get_throughput()
        # The configured autoscale max throughput must round-trip.
        self.assertEqual(created_container_properties.auto_scale_max_throughput, 5000)
        self.assertEqual(created_container_properties.auto_scale_increment_percent, 0)
        # Manual (fixed) throughput must be absent when autoscale is in effect.
        self.assertEqual(created_container_properties.offer_throughput, None)

        await self.created_database.delete_container(created_container)

        # Negative behavior: a negative max throughput must be rejected.
        with pytest.raises(exceptions.CosmosHttpResponseError) as e:
            await self.created_database.create_container(
                id='container_with_wrong_auto_scale_settings',
                partition_key=PartitionKey(path="/id"),
                offer_throughput=ThroughputProperties(auto_scale_max_throughput=-200,
                                                      auto_scale_increment_percent=0))
        # BUGFIX: outside the raises-block so it actually runs after the
        # exception has been captured.
        assert "Requested throughput -200 is less than required minimum throughput 1000" in str(e.value)

    async def test_create_container_if_not_exist(self):
        """Autoscale settings also apply on the create-if-not-exists path."""
        created_container = await self.created_database.create_container_if_not_exists(
            id='container_with_auto_scale_settings',
            partition_key=PartitionKey(path="/id"),
            offer_throughput=ThroughputProperties(auto_scale_max_throughput=1000,
                                                  auto_scale_increment_percent=0)
        )
        created_container_properties = await created_container.get_throughput()
        # Reported max throughput must match the request, not some default.
        self.assertNotEqual(created_container_properties.auto_scale_max_throughput, 2000)

        await self.created_database.delete_container(created_container)

        created_container = await self.created_database.create_container_if_not_exists(
            id='container_with_auto_scale_settings',
            partition_key=PartitionKey(path="/id"),
            offer_throughput=ThroughputProperties(auto_scale_max_throughput=5000,
                                                  auto_scale_increment_percent=2)
        )
        created_container_properties = await created_container.get_throughput()
        # The increment percentage must be the requested 2, not some other value.
        self.assertNotEqual(created_container_properties.auto_scale_increment_percent, 3)

        await self.created_database.delete_container(created_container)
        await self.client.delete_database(test_config._test_config.TEST_DATABASE_ID)

    async def test_create_database(self):
        """Database created with autoscale settings reports them back unchanged."""
        created_database = await self.client.create_database("db1", offer_throughput=ThroughputProperties(
            auto_scale_max_throughput=5000,
            auto_scale_increment_percent=0))
        created_db_properties = await created_database.get_throughput()
        self.assertEqual(created_db_properties.auto_scale_max_throughput, 5000)
        self.assertEqual(created_db_properties.auto_scale_increment_percent, 0)

        await self.client.delete_database("db1")

    async def test_create_database_if_not_exists(self):
        """Autoscale settings also apply on create_database_if_not_exists."""
        created_database = await self.client.create_database_if_not_exists("db2", offer_throughput=ThroughputProperties(
            auto_scale_max_throughput=9000,
            auto_scale_increment_percent=11))
        created_db_properties = await created_database.get_throughput()
        self.assertNotEqual(created_db_properties.auto_scale_max_throughput, 8000)
        self.assertEqual(created_db_properties.auto_scale_increment_percent, 11)

        await self.client.delete_database("db2")