diff --git a/sdk/core/azure-core/perf-tests.yml b/sdk/core/azure-core/perf-tests.yml index aa5267bc1baa..a20e66ab2ac5 100644 --- a/sdk/core/azure-core/perf-tests.yml +++ b/sdk/core/azure-core/perf-tests.yml @@ -13,7 +13,7 @@ Tests: Class: UploadBinaryDataTest Arguments: - --size 1024 --parallel 64 --duration 60 --policies all - - --size 1024 --parallel 64 --duration 60 --policies all --aad + - --size 1024 --parallel 64 --duration 60 --policies all --use-entra-id - --size 10240 --parallel 32 --duration 60 - --size 10240 --parallel 32 --duration 60 --transport requests @@ -22,7 +22,7 @@ Tests: Arguments: - --size 1024 --parallel 64 --duration 60 - --size 1024 --parallel 64 --duration 60 --transport requests - - --size 1024 --parallel 64 --duration 60 --aad + - --size 1024 --parallel 64 --duration 60 --use-entra-id - --size 10240 --parallel 32 --duration 60 --policies all - Test: update-entity @@ -30,7 +30,7 @@ Tests: Arguments: - --size 1024 --parallel 64 --duration 60 - --size 1024 --parallel 64 --duration 60 --transport requests - - --size 1024 --parallel 64 --duration 60 --aad + - --size 1024 --parallel 64 --duration 60 --use-entra-id - --size 1024 --parallel 64 --duration 60 --policies all - Test: query-entities @@ -38,7 +38,7 @@ Tests: Arguments: - --size 1024 --parallel 64 --duration 60 - --size 1024 --parallel 64 --duration 60 --transport requests - - --size 1024 --parallel 64 --duration 60 --aad + - --size 1024 --parallel 64 --duration 60 --use-entra-id - --size 1024 --parallel 64 --duration 60 --policies all - Test: list-entities @@ -46,5 +46,5 @@ Tests: Arguments: - --count 500 --parallel 32 --warmup 60 --duration 60 - --count 500 --parallel 32 --warmup 60 --duration 60 --transport requests - - --count 500 --parallel 32 --warmup 60 --duration 60 --aad + - --count 500 --parallel 32 --warmup 60 --duration 60 --use-entra-id - --count 500 --parallel 32 --warmup 60 --duration 60 --policies all \ No newline at end of file diff --git 
a/sdk/core/azure-core/tests/perf_tests/README.md b/sdk/core/azure-core/tests/perf_tests/README.md index 6cceced0235e..a176526b3f0e 100644 --- a/sdk/core/azure-core/tests/perf_tests/README.md +++ b/sdk/core/azure-core/tests/perf_tests/README.md @@ -65,8 +65,12 @@ The options that are available for all Core perf tests: - `"requests"`: AsyncioRequestsTransport - For sync: - `"requests"`: RequestsTransport (default) -- `--aad` - Flag to pass in to use Azure Active Directory as the authentication. By default, set to False. +- `--use-entra-id` - Flag to pass in to use Microsoft Entra ID as the authentication. By default, set to False. - `--size=10240` - Size of request content (in bytes). Defaults to 10240. (Not used by `ListEntitiesPageableTest`.) +- `--policies` - List of policies to pass in to the pipeline. Options: + - None: No extra policies passed in, except for authentication policy. This is the default. + - 'all': All policies added automatically by autorest. + - 'policy1,policy2': Comma-separated list of policies, such as 'RetryPolicy,HttpLoggingPolicy'" #### Additional ListEntitiesPageableTest command line options @@ -78,5 +82,5 @@ The options that are additionally available for `ListEntitiesPageableTest`: ## Example command ```cmd -(env) ~/azure-core> perfstress DownloadBinaryDataTest --aad --transport requests --size=20480 --parallel=2 +(env) ~/azure-core> perfstress DownloadBinaryDataTest --use-entra-id --transport requests --size=20480 --parallel=2 ``` diff --git a/sdk/core/azure-core/tests/perf_tests/_test_base.py b/sdk/core/azure-core/tests/perf_tests/_test_base.py index 0217e4734191..858022cd58d4 100644 --- a/sdk/core/azure-core/tests/perf_tests/_test_base.py +++ b/sdk/core/azure-core/tests/perf_tests/_test_base.py @@ -159,7 +159,7 @@ def _build_async_pipeline_client(self, auth_policy): return AsyncPipelineClient(self.account_endpoint, pipeline=async_pipeline) def _set_auth_policies(self): - if not self.args.aad: + if not self.args.use_entra_id: # if 
tables, create table credential policy, else blob policy if "tables" in self.sdk_moniker: self.sync_auth_policy = TableSharedKeyCredentialPolicy( @@ -199,7 +199,9 @@ def add_arguments(parser): """\n- 'policy1,policy2': Comma-separated list of policies, such as 'RetryPolicy,HttpLoggingPolicy'""", default=None, ) - parser.add_argument("--aad", action="store_true", help="Use AAD authentication instead of shared key.") + parser.add_argument( + "--use-entra-id", action="store_true", help="Use Microsoft Entra ID authentication instead of shared key." + ) class _BlobTest(_ServiceTest): diff --git a/sdk/core/corehttp/dev_requirements.txt b/sdk/core/corehttp/dev_requirements.txt index 83961ca3a8d1..edde757ea42d 100644 --- a/sdk/core/corehttp/dev_requirements.txt +++ b/sdk/core/corehttp/dev_requirements.txt @@ -7,3 +7,6 @@ httpx>=0.25.0 -e ../../../tools/azure-devtools -e tests/testserver_tests/coretestserver ../../core/azure-core +azure-storage-blob +azure-data-tables +azure-identity diff --git a/sdk/core/corehttp/perf-resources.bicep b/sdk/core/corehttp/perf-resources.bicep new file mode 100644 index 000000000000..714540b1bf04 --- /dev/null +++ b/sdk/core/corehttp/perf-resources.bicep @@ -0,0 +1,74 @@ +param baseName string = resourceGroup().name +param location string = resourceGroup().location +param storageEndpointSuffix string = 'core.windows.net' +param testApplicationOid string + +resource storageAccount 'Microsoft.Storage/storageAccounts@2019-06-01' = { + name: '${baseName}storage' + location: location + kind: 'StorageV2' + sku: { + name: 'Standard_RAGRS' + } +} + +resource storageAccountBlobServices 'Microsoft.Storage/storageAccounts/blobServices@2021-09-01' = { + name: 'default' + parent: storageAccount +} + +resource testContainer 'Microsoft.Storage/storageAccounts/blobServices/containers@2021-09-01' = { + name: 'testcontainer' + parent: storageAccountBlobServices + +} + +@description('This is the Blob owner role.') +resource blobOwnerRoleDefinition 
'Microsoft.Authorization/roleDefinitions@2018-01-01-preview' existing = { + scope: resourceGroup() + name: 'b7e6dc6d-f1e8-4753-8033-0f276bb0955b' +} + +resource blobRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = { + name: guid(resourceGroup().id, testApplicationOid, blobOwnerRoleDefinition.id) + properties: { + roleDefinitionId: blobOwnerRoleDefinition.id + principalId: testApplicationOid + principalType: 'ServicePrincipal' + } +} + +resource tableServices 'Microsoft.Storage/storageAccounts/tableServices@2022-09-01' = { + name: 'default' + parent: storageAccount +} + +resource tables 'Microsoft.Storage/storageAccounts/tableServices/tables@2022-09-01' = { + name: 'default' + parent: tableServices +} + +@description('This is the Table data contributor role.') +resource tableOwnerRoleDefinition 'Microsoft.Authorization/roleDefinitions@2018-01-01-preview' existing = { + scope: resourceGroup() + name: '0a9a7e1f-b9d0-4cc4-a60d-0319b160aaa3' +} + +resource tableRoleAssignment 'Microsoft.Authorization/roleAssignments@2022-04-01' = { + name: guid(resourceGroup().id, testApplicationOid, tableOwnerRoleDefinition.id) + properties: { + roleDefinitionId: tableOwnerRoleDefinition.id + principalId: testApplicationOid + principalType: 'ServicePrincipal' + } +} + +var storageAccountKey = storageAccount.listKeys('2021-09-01').keys[0].value +output AZURE_STORAGE_ACCOUNT_NAME string = storageAccount.name +output AZURE_STORAGE_BLOBS_ENDPOINT string = storageAccount.properties.primaryEndpoints.blob +output AZURE_STORAGE_ACCOUNT_KEY string = storageAccountKey +output AZURE_STORAGE_CONN_STR string = 'DefaultEndpointsProtocol=https;AccountName=${storageAccount.name};AccountKey=${storageAccountKey};EndpointSuffix=${storageEndpointSuffix}' +output AZURE_STORAGE_CONTAINER_NAME string = testContainer.name + +output AZURE_STORAGE_TABLE_NAME string = tables.name +output AZURE_STORAGE_TABLES_ENDPOINT string = 'https://${storageAccount.name}.table.${storageEndpointSuffix}/' diff --git 
a/sdk/core/corehttp/perf-tests.yml b/sdk/core/corehttp/perf-tests.yml new file mode 100644 index 000000000000..0a4b09aa61b9 --- /dev/null +++ b/sdk/core/corehttp/perf-tests.yml @@ -0,0 +1,50 @@ +Service: core + +Project: sdk/core/corehttp + +PrimaryPackage: corehttp + +PackageVersions: +- corehttp: 1.0.0b4 +- corehttp: source + +Tests: +- Test: upload-binary + Class: UploadBinaryDataTest + Arguments: + - --size 1024 --parallel 64 --duration 60 --policies all + - --size 1024 --parallel 64 --duration 60 --policies all --use-entra-id + - --size 10240 --parallel 32 --duration 60 + - --size 10240 --parallel 32 --duration 60 --transport httpx + +- Test: download-binary + Class: DownloadBinaryDataTest + Arguments: + - --size 1024 --parallel 64 --duration 60 + - --size 1024 --parallel 64 --duration 60 --transport httpx + - --size 1024 --parallel 64 --duration 60 --use-entra-id + - --size 10240 --parallel 32 --duration 60 --policies all + +- Test: update-entity + Class: UpdateEntityJSONTest + Arguments: + - --size 1024 --parallel 64 --duration 60 + - --size 1024 --parallel 64 --duration 60 --transport httpx + - --size 1024 --parallel 64 --duration 60 --use-entra-id + - --size 1024 --parallel 64 --duration 60 --policies all + +- Test: query-entities + Class: QueryEntitiesJSONTest + Arguments: + - --size 1024 --parallel 64 --duration 60 + - --size 1024 --parallel 64 --duration 60 --transport httpx + - --size 1024 --parallel 64 --duration 60 --use-entra-id + - --size 1024 --parallel 64 --duration 60 --policies all + +- Test: list-entities + Class: ListEntitiesPageableTest + Arguments: + - --count 500 --parallel 32 --warmup 60 --duration 60 + - --count 500 --parallel 32 --warmup 60 --duration 60 --transport httpx + - --count 500 --parallel 32 --warmup 60 --duration 60 --use-entra-id + - --count 500 --parallel 32 --warmup 60 --duration 60 --policies all \ No newline at end of file diff --git a/sdk/core/corehttp/perf.yml b/sdk/core/corehttp/perf.yml new file mode 100644 index 
000000000000..0686cc51ae03 --- /dev/null +++ b/sdk/core/corehttp/perf.yml @@ -0,0 +1,37 @@ +parameters: +- name: LanguageVersion + displayName: LanguageVersion (3.8, 3.9, 3.10, 3.11, 3.12) + type: string + default: '3.11' +- name: PackageVersions + displayName: PackageVersions (regex of package versions to run) + type: string + default: '1|source' +- name: Tests + displayName: Tests (regex of tests to run) + type: string + default: '^(upload-binary|download-binary|update-entity|query-entities|list-entities)$' +- name: Arguments + displayName: Arguments (regex of arguments to run) + type: string + default: '(1024)|(10240)|(500)' +- name: Iterations + displayName: Iterations (times to run each test) + type: number + default: '5' +- name: AdditionalArguments + displayName: AdditionalArguments (passed to PerfAutomation) + type: string + default: ' ' + +extends: + template: /eng/pipelines/templates/jobs/perf.yml + parameters: + TimeoutInMinutes: 720 + ServiceDirectory: core/corehttp + LanguageVersion: ${{ parameters.LanguageVersion }} + PackageVersions: ${{ parameters.PackageVersions }} + Tests: ${{ parameters.Tests }} + Arguments: ${{ parameters.Arguments }} + Iterations: ${{ parameters.Iterations }} + AdditionalArguments: ${{ parameters.AdditionalArguments }} \ No newline at end of file diff --git a/sdk/core/corehttp/tests/perf_tests/README.md b/sdk/core/corehttp/tests/perf_tests/README.md new file mode 100644 index 000000000000..278ecd358160 --- /dev/null +++ b/sdk/core/corehttp/tests/perf_tests/README.md @@ -0,0 +1,87 @@ +# Core Python client performance tests + +In order to run the performance tests, the `devtools_testutils` package must be installed. This is done as part of the `dev_requirements.txt` installation. Start by creating a new virtual environment for your perf tests. This will need to be a Python 3 environment, preferably >=3.7. 
+ +### Setup for test resources +The following environment variables will need to be set for the tests to access the live resources: + +``` +AZURE_STORAGE_CONN_STR= +AZURE_STORAGE_ACCOUNT_NAME= +AZURE_STORAGE_ACCOUNT_KEY= + +AZURE_STORAGE_CONTAINER_NAME= +AZURE_STORAGE_BLOBS_ENDPOINT= + +AZURE_STORAGE_TABLE_NAME= +AZURE_STORAGE_TABLES_ENDPOINT= +``` + +### Setup for perf test runs + +```cmd +(env) ~/core/corehttp> pip install -r dev_requirements.txt +(env) ~/core/corehttp> pip install . +``` + +## Test commands + +When `devtools_testutils` is installed, you will have access to the `perfstress` command line tool, which will scan the current module for runnable perf tests. Only a specific test can be run at a time (i.e. there is no "run all" feature). + +```cmd +(env) ~/core/corehttp> cd tests +(env) ~/core/corehttp/tests> perfstress +``` + +Using the `perfstress` command alone will list the available perf tests found. + +### Tests + +The tests currently available: + +- `UploadBinaryDataTest` - Puts binary data of `size` in a Storage Blob (corresponds to the `upload_blob` Blob operation). +- `DownloadBinaryDataTest` - Gets binary data of `size` from a Storage Blob (corresponds to the `download_blob` Blob operation). +- `UpdateEntityJSONTest` - Puts JSON data of `size` in a Storage Table (corresponds to the `update_entity` Tables operation). +- `QueryEntitiesJSONTest` - Gets JSON data of `size` from a Storage Table (corresponds to the `query_entities` Tables operation). +- `ListEntitiesPageableTest` - Gets pageable data from a Storage Table (corresponds to the `list_entities` Tables operation). + +### Common perf command line options + +The `perfstress` framework has a series of common command line options built in. View them [here](https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/perfstress_tests.md#default-command-options). + +- `--sync` Whether to run the tests in sync or async. Default is False (async). 
+- `-d --duration=10` Number of seconds to run as many operations (the "run" function) as possible. Default is 10. +- `-i --iterations=1` Number of test iterations to run. Default is 1. +- `-p --parallel=1` Number of tests to run in parallel. Default is 1. +- `-w --warm-up=5` Number of seconds to spend warming up the connection before measuring begins. Default is 5. + +#### Core perf test common command line options + +The options that are available for all Core perf tests: + +- `--transport` - By default, uses AiohttpTransport ("aiohttp") for async. By default, uses RequestsTransport ("requests") for sync. All options: + - For async: + - `"aiohttp"`: AiohttpTransport (default) + - `"httpx"`: AsyncHttpXTransport + - For sync: + - `"requests"`: RequestsTransport (default) + - `"httpx"`: HttpXTransport +- `--use-entra-id` - Flag to pass in to use Microsoft Entra ID as the authentication. By default, set to False. +- `--size=10240` - Size of request content (in bytes). Defaults to 10240. (Not used by `ListEntitiesPageableTest`.) +- `--policies` - List of policies to pass in to the pipeline. Options: + - None: No extra policies passed in, except for authentication policy. This is the default. + - 'all': All policies added automatically by autorest. + - 'policy1,policy2': Comma-separated list of policies, such as 'RetryPolicy,UserAgentPolicy'" + +#### Additional ListEntitiesPageableTest command line options + +The options that are additionally available for `ListEntitiesPageableTest`: + +- `--count=100` - Number of table entities to list. Defaults to 100. +- `--page-size=None` - Maximum number of entities to list per page. Default is None, which will return all possible results per page. 
+ +## Example command + +```cmd +(env) ~/core/corehttp> perfstress DownloadBinaryDataTest --use-entra-id --transport httpx --size=20480 --parallel=2 +``` diff --git a/sdk/core/corehttp/tests/perf_tests/__init__.py b/sdk/core/corehttp/tests/perf_tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/core/corehttp/tests/perf_tests/_test_base.py b/sdk/core/corehttp/tests/perf_tests/_test_base.py new file mode 100644 index 000000000000..427200104846 --- /dev/null +++ b/sdk/core/corehttp/tests/perf_tests/_test_base.py @@ -0,0 +1,258 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------------------------- + +import os +import uuid +import string +import random + +from devtools_testutils.perfstress_tests import PerfStressTest + +from corehttp.runtime import PipelineClient, AsyncPipelineClient +from corehttp.runtime.pipeline import Pipeline, AsyncPipeline +from corehttp.transport.aiohttp import AioHttpTransport +from corehttp.transport.requests import RequestsTransport +from corehttp.transport.httpx import HttpXTransport, AsyncHttpXTransport +from corehttp.runtime.policies import ( + UserAgentPolicy, + HeadersPolicy, + ProxyPolicy, + NetworkTraceLoggingPolicy, + RetryPolicy, + AsyncRetryPolicy, + BearerTokenCredentialPolicy, + AsyncBearerTokenCredentialPolicy, +) +import corehttp.runtime.policies as policies +from corehttp.credentials import ServiceNamedKeyCredential +from corehttp.exceptions import ( + ClientAuthenticationError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, +) +from azure.identity import ClientSecretCredential +from azure.identity.aio import ClientSecretCredential as AsyncClientSecretCredential +from 
azure.data.tables.aio import TableClient + +from azure.storage.blob._shared.authentication import SharedKeyCredentialPolicy as BlobSharedKeyCredentialPolicy +from azure.data.tables._authentication import SharedKeyCredentialPolicy as TableSharedKeyCredentialPolicy + +_LETTERS = string.ascii_letters + + +class _ServiceTest(PerfStressTest): + transport = None + async_transport = None + + def __init__(self, arguments): + super().__init__(arguments) + self.account_name = self.get_from_env("AZURE_STORAGE_ACCOUNT_NAME") + self.account_key = self.get_from_env("AZURE_STORAGE_ACCOUNT_KEY") + async_transport_types = {"aiohttp": AioHttpTransport, "httpx": AsyncHttpXTransport} + sync_transport_types = {"requests": RequestsTransport, "httpx": HttpXTransport} + self.tenant_id = os.environ["COREHTTP_TENANT_ID"] + self.client_id = os.environ["COREHTTP_CLIENT_ID"] + self.client_secret = os.environ["COREHTTP_CLIENT_SECRET"] + self.storage_scope = "https://storage.azure.com/.default" + + # defaults transports + self.sync_transport = RequestsTransport + self.async_transport = AioHttpTransport + + # if transport is specified, use that + if self.args.transport: + # if sync, override sync default + if self.args.sync: + try: + self.sync_transport = sync_transport_types[self.args.transport] + except KeyError: + raise ValueError( + f"Invalid sync transport:{self.args.transport}\n Valid options are:\n- requests\n- httpx\n" + ) + # if async, override async default + else: + try: + self.async_transport = async_transport_types[self.args.transport] + except KeyError: + raise ValueError( + f"Invalid async transport:{self.args.transport}\n Valid options are:\n- aiohttp\n- httpx\n" + ) + + self.error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + + def _build_sync_pipeline_client(self, auth_policy): + default_policies = [ + UserAgentPolicy, + HeadersPolicy, + ProxyPolicy, + NetworkTraceLoggingPolicy, + 
RetryPolicy, + ] + + if self.args.policies is None: + # if None, only auth policy is passed in + sync_pipeline = Pipeline(transport=self.sync_transport(), policies=[auth_policy]) + elif self.args.policies == "all": + # if all, autorest default policies + auth policy + sync_policies = [auth_policy] + sync_policies.extend([policy(sdk_moniker=self.sdk_moniker) for policy in default_policies]) + sync_pipeline = Pipeline(transport=self.sync_transport(), policies=sync_policies) + else: + sync_policies = [auth_policy] + for p in self.args.policies.split(","): + try: + policy = getattr(policies, p) + except AttributeError as exc: + raise ValueError( + f"Corehttp has no policy named {exc.name}. Please use policies from the following list: {policies.__all__}" + ) from exc + sync_policies.append(policy(sdk_moniker=self.sdk_moniker)) + sync_pipeline = Pipeline(transport=self.sync_transport(), policies=sync_policies) + return PipelineClient(self.account_endpoint, pipeline=sync_pipeline) + + def _build_async_pipeline_client(self, auth_policy): + default_policies = [ + UserAgentPolicy, + HeadersPolicy, + ProxyPolicy, + NetworkTraceLoggingPolicy, + AsyncRetryPolicy, + ] + if self.args.policies is None: + # if None, only auth policy is passed in + async_pipeline = AsyncPipeline(transport=self.async_transport(), policies=[auth_policy]) + elif self.args.policies == "all": + # if all, autorest default policies + auth policy + async_policies = [auth_policy] + async_policies.extend([policy(sdk_moniker=self.sdk_moniker) for policy in default_policies]) + async_pipeline = AsyncPipeline(transport=self.async_transport(), policies=async_policies) + else: + async_policies = [auth_policy] + # if custom list of policies, pass in custom list + auth policy + for p in self.args.policies.split(","): + try: + policy = getattr(policies, p) + except AttributeError as exc: + raise ValueError( + f"Corehttp has no policy named {exc.name}. 
Please use policies from the following list: {policies.__all__}" + ) from exc + async_policies.append(policy(sdk_moniker=self.sdk_moniker)) + async_pipeline = AsyncPipeline(transport=self.async_transport(), policies=async_policies) + return AsyncPipelineClient(self.account_endpoint, pipeline=async_pipeline) + + def _set_auth_policies(self): + if not self.args.use_entra_id: + # if tables, create table credential policy, else blob policy + if "tables" in self.sdk_moniker: + self.sync_auth_policy = TableSharedKeyCredentialPolicy( + ServiceNamedKeyCredential(self.account_name, self.account_key) + ) + self.async_auth_policy = self.sync_auth_policy + else: + self.sync_auth_policy = BlobSharedKeyCredentialPolicy(self.account_name, self.account_key) + self.async_auth_policy = self.sync_auth_policy + else: + sync_credential = ClientSecretCredential(self.tenant_id, self.client_id, self.client_secret) + self.sync_auth_policy = BearerTokenCredentialPolicy(sync_credential, self.storage_scope) + async_credential = AsyncClientSecretCredential(self.tenant_id, self.client_id, self.client_secret) + self.async_auth_policy = AsyncBearerTokenCredentialPolicy(async_credential, self.storage_scope) + + @staticmethod + def add_arguments(parser): + super(_ServiceTest, _ServiceTest).add_arguments(parser) + parser.add_argument( + "--transport", + nargs="?", + type=str, + help="""Underlying HttpTransport type. Defaults to `aiohttp` if async, `requests` if sync. Other possible values for sync/async:\n""" + """ - `httpx`\n""", + default=None, + ) + parser.add_argument( + "-s", "--size", nargs="?", type=int, help="Size of data to transfer. Default is 10240.", default=10240 + ) + parser.add_argument( + "--policies", + nargs="?", + type=str, + help="""List of policies to pass in to the pipeline. Options:""" + """\n- None: No extra policies passed in, except for authentication policy. 
This is the default.""" + """\n- 'all': All policies added automatically by autorest.""" + """\n- 'policy1,policy2': Comma-separated list of policies, such as 'RetryPolicy,UserAgentPolicy'""", + default=None, + ) + parser.add_argument( + "--use-entra-id", action="store_true", help="Use Microsoft Entra ID authentication instead of shared key." + ) + + +class _BlobTest(_ServiceTest): + container_name = "perfstress-" + str(uuid.uuid4()) + + def __init__(self, arguments): + super().__init__(arguments) + self.account_endpoint = self.get_from_env("AZURE_STORAGE_BLOBS_ENDPOINT") + self.container_name = self.get_from_env("AZURE_STORAGE_CONTAINER_NAME") + self.api_version = "2021-12-02" + self.sdk_moniker = f"storage-blob/{self.api_version}" + + self._set_auth_policies() + self.pipeline_client = self._build_sync_pipeline_client(self.sync_auth_policy) + self.async_pipeline_client = self._build_async_pipeline_client(self.async_auth_policy) + + async def close(self): + self.pipeline_client.close() + await self.async_pipeline_client.close() + await super().close() + + +class _TableTest(_ServiceTest): + table_name = "".join(random.choice(_LETTERS) for i in range(30)) + + def __init__(self, arguments): + super().__init__(arguments) + self.account_endpoint = self.get_from_env("AZURE_STORAGE_TABLES_ENDPOINT") + self.api_version = "2019-02-02" + self.data_service_version = "3.0" + self.sdk_moniker = f"tables/{self.api_version}" + self._set_auth_policies() + + self.pipeline_client = self._build_sync_pipeline_client(self.sync_auth_policy) + self.async_pipeline_client = self._build_async_pipeline_client(self.async_auth_policy) + + self.connection_string = self.get_from_env("AZURE_STORAGE_CONN_STR") + self.async_table_client = TableClient.from_connection_string(self.connection_string, self.table_name) + + async def global_setup(self): + await super().global_setup() + await self.async_table_client.create_table() + + async def global_cleanup(self): + await 
self.async_table_client.delete_table() + + def get_base_entity(self, pk, rk, size): + # 227 is the length of the entity with Data of length 0 + base_entity_length = 227 + data_length = max(size - base_entity_length, 0) + # size = 227 + data_length + return { + "PartitionKey": pk, + "RowKey": rk, + "Data": "a" * data_length, + } + + def get_entity(self, rk=0): + return {"PartitionKey": "pk", "RowKey": str(rk), "Property1": f"a{rk}", "Property2": f"b{rk}"} + + async def close(self): + self.pipeline_client.close() + await self.async_pipeline_client.close() + await super().close() diff --git a/sdk/core/corehttp/tests/perf_tests/custom_iterator.py b/sdk/core/corehttp/tests/perf_tests/custom_iterator.py new file mode 100644 index 000000000000..392a2efbe2cb --- /dev/null +++ b/sdk/core/corehttp/tests/perf_tests/custom_iterator.py @@ -0,0 +1,80 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- +from typing import ( + Any, + Callable, +) +from corehttp.paging import PageIterator, AsyncPageIterator + +NEXT_PARTITION_KEY = "x-ms-continuation-NextPartitionKey" +NEXT_ROW_KEY = "x-ms-continuation-NextRowKey" + + +class MockResponse: + pass + + +class CustomIterator(PageIterator): + def __init__(self, command: Callable, **kwargs: Any) -> None: + super(CustomIterator, self).__init__( + self._get_next_cb, + self._extract_data_cb, + continuation_token=kwargs.get("continuation_token"), + ) + self._command = command + self.page_size = kwargs.get("page_size") + + def _get_next_cb(self, continuation_token, **kwargs): # pylint: disable=inconsistent-return-statements + if not continuation_token: + next_partition_key = None + next_row_key = None + else: + next_partition_key = continuation_token.get("PartitionKey") + next_row_key = continuation_token.get("RowKey") + + return self._command(top=self.page_size, next_partition_key=next_partition_key, next_row_key=next_row_key) + + def _extract_data_cb(self, response): + next_entity = None + if response.headers and (NEXT_PARTITION_KEY in response.headers or NEXT_ROW_KEY in response.headers): + next_entity = { + "PartitionKey": response.headers[NEXT_PARTITION_KEY], + "RowKey": response.headers[NEXT_ROW_KEY], + } + + return next_entity, response.json()["value"] + + +class AsyncCustomIterator(AsyncPageIterator): + def __init__( + self, + command, + page_size=None, + continuation_token=None, + ): + super(AsyncCustomIterator, self).__init__( + get_next=self._get_next_cb, extract_data=self._extract_data_cb, continuation_token=continuation_token or "" + ) + self._command = command + self.page_size = page_size + + async def _get_next_cb(self, continuation_token, **kwargs): # pylint: disable=inconsistent-return-statements + if not continuation_token: + next_partition_key = None + next_row_key = None + else: + next_partition_key = 
continuation_token.get("PartitionKey") + next_row_key = continuation_token.get("RowKey") + + return await self._command(top=self.page_size, next_partition_key=next_partition_key, next_row_key=next_row_key) + + async def _extract_data_cb(self, response): + next_entity = None + if response.headers and (NEXT_PARTITION_KEY in response.headers or NEXT_ROW_KEY in response.headers): + next_entity = { + "PartitionKey": response.headers[NEXT_PARTITION_KEY], + "RowKey": response.headers[NEXT_ROW_KEY], + } + return next_entity, response.json()["value"] diff --git a/sdk/core/corehttp/tests/perf_tests/download_binary.py b/sdk/core/corehttp/tests/perf_tests/download_binary.py new file mode 100644 index 000000000000..5ffd6fb8905c --- /dev/null +++ b/sdk/core/corehttp/tests/perf_tests/download_binary.py @@ -0,0 +1,94 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------------------------- + +from time import time +from wsgiref.handlers import format_date_time +from devtools_testutils.perfstress_tests import get_random_bytes, WriteStream + +from corehttp.exceptions import ( + HttpResponseError, + map_error, +) +from corehttp.rest import HttpRequest +from azure.storage.blob._generated.operations._block_blob_operations import build_upload_request +from ._test_base import _BlobTest + + +class DownloadBinaryDataTest(_BlobTest): + def __init__(self, arguments): + super().__init__(arguments) + blob_name = "streamdownloadtest" + self.blob_endpoint = f"{self.account_endpoint}{self.container_name}/{blob_name}" + + async def global_setup(self): + await super().global_setup() + data = get_random_bytes(self.args.size) + current_time = format_date_time(time()) + request = build_upload_request( + url=self.blob_endpoint, + content=data, + content_length=self.args.size, + content_type="application/octet-stream", + headers={ + "x-ms-version": self.api_version, + "x-ms-date": current_time, + }, + ) + response = (await self.async_pipeline_client.pipeline.run(request, stream=False)).http_response + + if response.status_code not in [201]: + map_error(status_code=response.status_code, response=response, error_map=self.error_map) + raise HttpResponseError(response=response) + + def run_sync(self): + current_time = format_date_time(time()) + request = HttpRequest( + "GET", + self.blob_endpoint, + headers={ + "x-ms-version": self.api_version, + "Accept": "application/octet-stream", + "x-ms-date": current_time, + }, + ) + # Many policies in Azure SDKs use the backcompatible attribute `query` on HttpRequest. This is not present in corehttp.HttpRequest, so we add it manually to make + # Azure SDK policies work with corehttp. 
+ request.query = {} + response = self.pipeline_client.pipeline.run( + request, + stream=True, + ).http_response + response.read() + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=self.error_map) + raise HttpResponseError(response=response) + + async def run_async(self): + current_time = format_date_time(time()) + request = HttpRequest( + "GET", + self.blob_endpoint, + headers={ + "x-ms-version": self.api_version, + "Accept": "application/octet-stream", + "x-ms-date": current_time, + }, + ) + # Many policies in Azure SDKs use the backcompatible attribute `query` on HttpRequest. This is not present in corehttp.HttpRequest, so we add it manually to make + # Azure SDK policies work with corehttp. + request.query = {} + response = ( + await self.async_pipeline_client.pipeline.run( + request, + stream=True, + ) + ).http_response + await response.read() + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=self.error_map) + raise HttpResponseError(response=response) + + async def close(self): + await super().close() diff --git a/sdk/core/corehttp/tests/perf_tests/list_entities_json.py b/sdk/core/corehttp/tests/perf_tests/list_entities_json.py new file mode 100644 index 000000000000..474e1c7dbeb7 --- /dev/null +++ b/sdk/core/corehttp/tests/perf_tests/list_entities_json.py @@ -0,0 +1,135 @@ +# -------------------------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
# --------------------------------------------------------------------------------------------
from time import time
from wsgiref.handlers import format_date_time
from urllib.parse import quote

from corehttp.rest import HttpRequest
from corehttp.exceptions import (
    HttpResponseError,
    map_error,
)
from corehttp.paging import ItemPaged, AsyncItemPaged

from .custom_iterator import CustomIterator, AsyncCustomIterator
from ._test_base import _TableTest


class ListEntitiesPageableTest(_TableTest):
    """Perf test that pages through table entities with ItemPaged/AsyncItemPaged.

    ``global_setup`` creates ``--count`` entities; each iteration then lists them
    all, ``--page-size`` at a time, driving the custom page iterators.
    """

    def __init__(self, arguments):
        super().__init__(arguments)
        self.url = f"{self.account_endpoint}{self.table_name}()"

    async def global_setup(self):
        """Create ``--count`` entities, submitted in transaction batches of up to 100."""
        await super().global_setup()
        batch = []
        for row in range(self.args.count):
            batch.append(("upsert", self.get_entity(row)))
            # The table service limits a transaction batch; flush every 100 ops.
            if len(batch) >= 100:
                await self.async_table_client.submit_transaction(batch)
                batch = []
        if batch:
            await self.async_table_client.submit_transaction(batch)

    def _build_list_request(self, top=None, next_partition_key=None, next_row_key=None):
        """Build the List Entities GET request shared by the sync and async paths.

        ``top`` maps to ``$top``; the continuation keys, when present, are URL-quoted
        and passed as ``NextPartitionKey``/``NextRowKey``.
        """
        params = {}
        if top:
            params["$top"] = top
        if next_partition_key:
            params["NextPartitionKey"] = quote(next_partition_key)
        if next_row_key:
            params["NextRowKey"] = quote(next_row_key)
        request = HttpRequest(
            method="GET",
            url=self.url,
            params=params,
            headers={
                "x-ms-version": self.api_version,
                "DataServiceVersion": self.data_service_version,
                "Accept": "application/json;odata=minimalmetadata;",
                "x-ms-date": format_date_time(time()),
            },
        )
        # NOTE: many Azure SDK policies rely on the backward-compatible `query`
        # attribute of azure.core's HttpRequest; corehttp.HttpRequest has no such
        # attribute, so we set it manually to keep those policies working.
        request.query = {}
        return request

    def _raise_if_failed(self, response):
        """Map any non-200 response onto the service error types and raise."""
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
            raise HttpResponseError(response=response)

    def _get_list_entities(self, *, top=None, next_partition_key=None, next_row_key=None, **kwargs):
        """Fetch one page synchronously; called by ItemPaged via CustomIterator."""
        request = self._build_list_request(top=top, next_partition_key=next_partition_key, next_row_key=next_row_key)
        response = self.pipeline_client.pipeline.run(request).http_response
        self._raise_if_failed(response)
        return response

    async def _get_list_entities_async(self, *, top=None, next_partition_key=None, next_row_key=None, **kwargs):
        """Fetch one page asynchronously; called by AsyncItemPaged via AsyncCustomIterator."""
        request = self._build_list_request(top=top, next_partition_key=next_partition_key, next_row_key=next_row_key)
        response = (await self.async_pipeline_client.pipeline.run(request)).http_response
        self._raise_if_failed(response)
        return response

    def run_sync(self):
        """Iterate every entity through the sync pager."""
        for _ in ItemPaged(
            self._get_list_entities,
            page_iterator_class=CustomIterator,
            page_size=self.args.page_size,
        ):
            pass

    async def run_async(self):
        """Iterate every entity through the async pager."""
        async for _ in AsyncItemPaged(
            self._get_list_entities_async,
            page_iterator_class=AsyncCustomIterator,
            page_size=self.args.page_size,
        ):
            pass

    async def close(self):
        await self.async_table_client.close()
        await super().close()

    @staticmethod
    def add_arguments(parser):
        super(ListEntitiesPageableTest, ListEntitiesPageableTest).add_arguments(parser)
        parser.add_argument(
            "--page-size",
            nargs="?",
            type=int,
            help="""Max number of entities to list per page. """
            """Default is None, which will return all possible results per page.""",
            default=None,
        )
        parser.add_argument(
            "-c", "--count", nargs="?", type=int, help="Number of table entities to list. Defaults to 100", default=100
        )

# === sdk/core/corehttp/tests/perf_tests/query_entities_json.py (new file) ===
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from time import time
from wsgiref.handlers import format_date_time
from urllib.parse import quote

from corehttp.rest import HttpRequest
from corehttp.exceptions import (
    HttpResponseError,
    map_error,
)
from ._test_base import _TableTest


class QueryEntitiesJSONTest(_TableTest):
    """Perf test that queries table entities with $select/$filter and parses the JSON body.

    ``global_setup`` creates the one entity the fixed filter matches; each
    iteration issues the query and decodes the JSON response.
    """

    def __init__(self, arguments):
        super().__init__(arguments)
        # Pre-quoted query parameter values, reused on every iteration.
        self.select = quote("Property2")
        self.filter = quote("Property1 eq 'a'")
        self.url = f"{self.account_endpoint}{self.table_name}"

    async def global_setup(self):
        """Create the entity that the query will match."""
        await super().global_setup()
        entity = self.get_entity()
        await self.async_table_client.create_entity(entity)

    def _build_query_request(self):
        """Build the Query Entities GET request shared by the sync and async paths."""
        request = HttpRequest(
            method="GET",
            url=self.url,
            params={"$select": self.select, "$filter": self.filter},
            headers={
                "x-ms-version": self.api_version,
                "DataServiceVersion": self.data_service_version,
                "Content-Type": "application/json",
                "Accept": "application/json",
                "x-ms-date": format_date_time(time()),
            },
        )
        # NOTE: many Azure SDK policies rely on the backward-compatible `query`
        # attribute of azure.core's HttpRequest; corehttp.HttpRequest has no such
        # attribute, so we set it manually to keep those policies working.
        request.query = {"$select": self.select, "$filter": self.filter}
        return request

    def run_sync(self):
        """Run the query and decode the JSON body (sync)."""
        response = self.pipeline_client.pipeline.run(self._build_query_request()).http_response
        # Validate the status BEFORE parsing: error bodies are not guaranteed to
        # be JSON, and parsing first could raise a decode error that masks the
        # intended HttpResponseError.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
            raise HttpResponseError(response=response)
        response.json()

    async def run_async(self):
        """Run the query and decode the JSON body (async)."""
        response = (await self.async_pipeline_client.pipeline.run(self._build_query_request())).http_response
        # Status check first — see run_sync.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
            raise HttpResponseError(response=response)
        response.json()

    async def close(self):
        await self.async_table_client.close()
        await super().close()

# === sdk/core/corehttp/tests/perf_tests/update_entity_json.py (new file) ===
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import uuid
from time import time
from wsgiref.handlers import format_date_time

from corehttp.rest import HttpRequest
from corehttp.exceptions import (
    HttpResponseError,
    map_error,
)
from ._test_base import _TableTest


class UpdateEntityJSONTest(_TableTest):
    """Perf test that repeatedly replaces (PUT, ``If-Match: *``) a single table entity."""

    # Class-level keys: every instance in this process targets the same entity.
    partition_key = str(uuid.uuid4())
    row_key = str(uuid.uuid4())

    def __init__(self, arguments):
        super().__init__(arguments)
        # Entity payload of --size bytes, re-sent on every update.
        self.base_entity = self.get_base_entity(
            UpdateEntityJSONTest.partition_key, UpdateEntityJSONTest.row_key, self.args.size
        )
        self.url = f"{self.account_endpoint}{self.table_name}(PartitionKey='{UpdateEntityJSONTest.partition_key}',RowKey='{UpdateEntityJSONTest.row_key}')"

    async def global_setup(self):
        """Create the entity that run_sync/run_async will keep updating."""
        await super().global_setup()
        await self.async_table_client.create_entity(self.base_entity)

    def _build_update_request(self):
        """Build the unconditional entity-replacement PUT shared by sync and async runs."""
        request = HttpRequest(
            method="PUT",
            url=self.url,
            params={},
            headers={
                "x-ms-version": self.api_version,
                "DataServiceVersion": self.data_service_version,
                "Content-Type": "application/json",
                "Accept": "application/json",
                "If-Match": "*",
                "x-ms-date": format_date_time(time()),
            },
            json=self.base_entity,
            content=None,
        )
        # NOTE: many Azure SDK policies rely on the backward-compatible `query`
        # attribute of azure.core's HttpRequest; corehttp.HttpRequest has no such
        # attribute, so we set it manually to keep those policies working.
        request.query = {}
        return request

    def run_sync(self):
        """Replace the entity once (sync); success here is 204 No Content."""
        response = self.pipeline_client.pipeline.run(self._build_update_request()).http_response
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
            raise HttpResponseError(response=response)

    async def run_async(self):
        """Replace the entity once (async); success here is 204 No Content."""
        response = (await self.async_pipeline_client.pipeline.run(self._build_update_request())).http_response
        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
            raise HttpResponseError(response=response)

    async def close(self):
        await self.async_table_client.close()
        await super().close()

# === sdk/core/corehttp/tests/perf_tests/upload_binary.py (new file) ===
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------

from time import time
from wsgiref.handlers import format_date_time
from devtools_testutils.perfstress_tests import RandomStream, AsyncRandomStream

from corehttp.rest import HttpRequest
from corehttp.exceptions import (
    HttpResponseError,
    map_error,
)
from ._test_base import _BlobTest

# NOTE(review): this module previously attached a DEBUG-level StreamHandler to
# the "corehttp" logger at import time. That was leftover debug instrumentation:
# no sibling perf-test module configures logging, and per-request DEBUG output
# to stdout would distort the timings this perf test exists to measure, so the
# logging setup has been removed.


class UploadBinaryDataTest(_BlobTest):
    """Perf test measuring a single block-blob upload from a random stream."""

    def __init__(self, arguments):
        super().__init__(arguments)
        blob_name = "uploadtest"
        self.blob_endpoint = f"{self.account_endpoint}{self.container_name}/{blob_name}"
        # Reusable random payload streams of --size bytes; reset before each run.
        self.upload_stream = RandomStream(self.args.size)
        self.upload_stream_async = AsyncRandomStream(self.args.size)

    def _build_upload_request(self, content):
        """Build the BlockBlob PUT request shared by the sync and async runs."""
        request = HttpRequest(
            method="PUT",
            url=self.blob_endpoint,
            params={},
            headers={
                "x-ms-date": format_date_time(time()),
                "x-ms-blob-type": "BlockBlob",
                "Content-Length": str(self.args.size),
                "x-ms-version": self.api_version,
                "Content-Type": "application/octet-stream",
            },
            content=content,
        )
        # NOTE: many Azure SDK policies rely on the backward-compatible `query`
        # attribute of azure.core's HttpRequest; corehttp.HttpRequest has no such
        # attribute, so we set it manually to keep those policies working.
        request.query = {}
        return request

    def run_sync(self):
        """Upload --size bytes from the reset stream (sync); success is 201 Created."""
        self.upload_stream.reset()
        response = self.pipeline_client.pipeline.run(self._build_upload_request(self.upload_stream)).http_response
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
            raise HttpResponseError(response=response)

    async def run_async(self):
        """Upload --size bytes from the reset async stream; success is 201 Created."""
        self.upload_stream_async.reset()
        request = self._build_upload_request(self.upload_stream_async)
        response = (await self.async_pipeline_client.pipeline.run(request)).http_response
        if response.status_code not in [201]:
            map_error(status_code=response.status_code, response=response, error_map=self.error_map)
            raise HttpResponseError(response=response)

    async def close(self):
        await super().close()