diff --git a/sdk/storage/azure-mgmt-storageimportexport/CHANGELOG.md b/sdk/storage/azure-mgmt-storageimportexport/CHANGELOG.md new file mode 100644 index 000000000000..578ed6acf479 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/CHANGELOG.md @@ -0,0 +1,5 @@ +# Release History + +## 0.1.0 (1970-01-01) + +* Initial Release diff --git a/sdk/storage/azure-mgmt-storageimportexport/MANIFEST.in b/sdk/storage/azure-mgmt-storageimportexport/MANIFEST.in new file mode 100644 index 000000000000..3a9b6517412b --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/MANIFEST.in @@ -0,0 +1,6 @@ +include _meta.json +recursive-include tests *.py *.yaml +include *.md +include azure/__init__.py +include azure/mgmt/__init__.py + diff --git a/sdk/storage/azure-mgmt-storageimportexport/README.md b/sdk/storage/azure-mgmt-storageimportexport/README.md new file mode 100644 index 000000000000..89e67d0f8228 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/README.md @@ -0,0 +1,27 @@ +# Microsoft Azure SDK for Python + +This is the Microsoft Azure Storage Import/Export Management Client Library. +This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8. +For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). + + +# Usage + + +To learn how to use this package, see the [quickstart guide](https://aka.ms/azsdk/python/mgmt); a minimal illustrative example is also included at the end of this README. + + + +For docs and references, see [Python SDK References](https://docs.microsoft.com/python/api/overview/azure/). +Code samples for this package can be found at [Storage Import/Export Management](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com. +Additional code samples for different Azure services are available at [Samples Repo](https://aka.ms/azsdk/python/mgmt/samples). + + +# Provide Feedback + +If you encounter any bugs or have suggestions, please file an issue in the +[Issues](https://github.com/Azure/azure-sdk-for-python/issues) +section of the project.
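+# Example
+
+As a minimal illustrative sketch (not taken from the quickstart guide), the synchronous client defined by this package can be created and used roughly as follows. `DefaultAzureCredential` comes from the separate `azure-identity` package, and the subscription ID shown is a placeholder; both are assumptions made for the example.
+
+```python
+# Illustrative sketch only: assumes azure-identity is installed and that
+# Azure credentials are available in the environment.
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.storageimportexport import StorageImportExport
+
+credential = DefaultAzureCredential()
+subscription_id = "<your-subscription-id>"  # placeholder, not a real value
+
+# Create the management client for the Storage Import/Export resource provider.
+client = StorageImportExport(credential, subscription_id)
+
+# List the Azure locations to which import/export disks can be shipped.
+for location in client.locations.list():
+    print(location.name)
+```
+
+This uses the synchronous `StorageImportExport` client; an asynchronous client with the same operation groups is available from `azure.mgmt.storageimportexport.aio`.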
+ + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-mgmt-storageimportexport%2FREADME.png) diff --git a/sdk/storage/azure-mgmt-storageimportexport/_meta.json b/sdk/storage/azure-mgmt-storageimportexport/_meta.json new file mode 100644 index 000000000000..935a2d1825ea --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/_meta.json @@ -0,0 +1,11 @@ +{ + "autorest": "3.4.2", + "use": [ + "@autorest/python@5.8.0", + "@autorest/modelerfour@4.19.2" + ], + "commit": "ebe9f9ff9ebd2796ac7163ffa586b9cdb2858c62", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/storageimportexport/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.0 --use=@autorest/modelerfour@4.19.2 --version=3.4.2", + "readme": "specification/storageimportexport/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/__init__.py b/sdk/storage/azure-mgmt-storageimportexport/azure/__init__.py new file mode 100644 index 000000000000..0260537a02bb --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/__init__.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/__init__.py new file mode 100644 index 000000000000..0260537a02bb --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/__init__.py @@ -0,0 +1 @@ +__path__ = __import__('pkgutil').extend_path(__path__, __name__) \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/__init__.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/__init__.py new file mode 100644 index 000000000000..3573ec61388d --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._storage_import_export import StorageImportExport +from ._version import VERSION + +__version__ = VERSION +__all__ = ['StorageImportExport'] + +try: + from ._patch import patch_sdk # type: ignore + patch_sdk() +except ImportError: + pass diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_configuration.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_configuration.py new file mode 100644 index 000000000000..bc668f91b24f --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_configuration.py @@ -0,0 +1,75 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + + +class StorageImportExportConfiguration(Configuration): + """Configuration for StorageImportExport. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The subscription ID for the Azure user. + :type subscription_id: str + :param accept_language: Specifies the preferred language for the response. + :type accept_language: str + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + accept_language=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> None + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(StorageImportExportConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.accept_language = accept_language + self.api_version = "2021-01-01" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-storageimportexport/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs # type: Any + ): + # type: (...) 
-> None + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_metadata.json b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_metadata.json new file mode 100644 index 000000000000..42053ef5ac93 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_metadata.json @@ -0,0 +1,118 @@ +{ + "chosen_version": "2021-01-01", + "total_api_version_list": ["2021-01-01"], + "client": { + "name": "StorageImportExport", + "filename": "_storage_import_export", + "description": "The Storage Import/Export Resource Provider API.", + "base_url": "\u0027https://management.azure.com\u0027", + "custom_base_url": null, + "azure_arm": true, + "has_lro_operations": false, + "client_side_validation": false, + "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StorageImportExportConfiguration\"]}}}", + "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"StorageImportExportConfiguration\"]}}}" + }, + "global_parameters": { + "sync": { + "credential": { + "signature": "credential, # type: \"TokenCredential\"", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials.TokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id, # type: str", + "description": "The subscription ID for the Azure user.", + "docstring_type": "str", + "required": true + }, + "accept_language": { + "signature": "accept_language=None, # type: Optional[str]", + "description": "Specifies the 
preferred language for the response.", + "docstring_type": "str", + "required": false + } + }, + "async": { + "credential": { + "signature": "credential: \"AsyncTokenCredential\",", + "description": "Credential needed for the client to connect to Azure.", + "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", + "required": true + }, + "subscription_id": { + "signature": "subscription_id: str,", + "description": "The subscription ID for the Azure user.", + "docstring_type": "str", + "required": true + }, + "accept_language": { + "signature": "accept_language: Optional[str] = None,", + "description": "Specifies the preferred language for the response.", + "docstring_type": "str", + "required": false + } + }, + "constant": { + }, + "call": "credential, subscription_id, accept_language", + "service_client_specific": { + "sync": { + "api_version": { + "signature": "api_version=None, # type: Optional[str]", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url=None, # type: Optional[str]", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile=KnownProfiles.default, # type: KnownProfiles", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + }, + "async": { + "api_version": { + "signature": "api_version: Optional[str] = None,", + "description": "API version to use if no profile is provided, or if missing in profile.", + "docstring_type": "str", + "required": false + }, + "base_url": { + "signature": "base_url: Optional[str] = None,", + "description": "Service URL", + "docstring_type": "str", + "required": false + }, + "profile": { + "signature": "profile: KnownProfiles = KnownProfiles.default,", + "description": "A profile definition, from KnownProfiles to dict.", + "docstring_type": "azure.profiles.KnownProfiles", + "required": false + } + } + } + }, + "config": { + "credential": true, + "credential_scopes": ["https://management.azure.com/.default"], + "credential_default_policy_type": "BearerTokenCredentialPolicy", + "credential_default_policy_type_has_async_version": true, + "credential_key_header_name": null, + "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", + "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" + }, + "operation_groups": { + "locations": "LocationsOperations", + "jobs": "JobsOperations", + "bit_locker_keys": "BitLockerKeysOperations", + "operations": "Operations" + } +} \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_storage_import_export.py 
b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_storage_import_export.py new file mode 100644 index 000000000000..d05fda135d1f --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_storage_import_export.py @@ -0,0 +1,106 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import TYPE_CHECKING + +from azure.mgmt.core import ARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Optional + + from azure.core.credentials import TokenCredential + from azure.core.pipeline.transport import HttpRequest, HttpResponse + +from ._configuration import StorageImportExportConfiguration +from .operations import LocationsOperations +from .operations import JobsOperations +from .operations import BitLockerKeysOperations +from .operations import Operations +from . import models + + +class StorageImportExport(object): + """The Storage Import/Export Resource Provider API. + + :ivar locations: LocationsOperations operations + :vartype locations: storage_import_export.operations.LocationsOperations + :ivar jobs: JobsOperations operations + :vartype jobs: storage_import_export.operations.JobsOperations + :ivar bit_locker_keys: BitLockerKeysOperations operations + :vartype bit_locker_keys: storage_import_export.operations.BitLockerKeysOperations + :ivar operations: Operations operations + :vartype operations: storage_import_export.operations.Operations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The subscription ID for the Azure user. + :type subscription_id: str + :param accept_language: Specifies the preferred language for the response. + :type accept_language: str + :param str base_url: Service URL + """ + + def __init__( + self, + credential, # type: "TokenCredential" + subscription_id, # type: str + accept_language=None, # type: Optional[str] + base_url=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) 
-> None + if not base_url: + base_url = 'https://management.azure.com' + self._config = StorageImportExportConfiguration(credential, subscription_id, accept_language, **kwargs) + self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.locations = LocationsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.bit_locker_keys = BitLockerKeysOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + + def _send_request(self, http_request, **kwargs): + # type: (HttpRequest, Any) -> HttpResponse + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. + :rtype: ~azure.core.pipeline.transport.HttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + def close(self): + # type: () -> None + self._client.close() + + def __enter__(self): + # type: () -> StorageImportExport + self._client.__enter__() + return self + + def __exit__(self, *exc_details): + # type: (Any) -> None + self._client.__exit__(*exc_details) diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_version.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_version.py new file mode 100644 index 000000000000..e5754a47ce68 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/__init__.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/__init__.py new file mode 100644 index 000000000000..9dacfa50491b --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/__init__.py @@ -0,0 +1,10 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._storage_import_export import StorageImportExport +__all__ = ['StorageImportExport'] diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/_configuration.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/_configuration.py new file mode 100644 index 000000000000..6790d64ebbcc --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/_configuration.py @@ -0,0 +1,71 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.configuration import Configuration +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + + +class StorageImportExportConfiguration(Configuration): + """Configuration for StorageImportExport. + + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The subscription ID for the Azure user. + :type subscription_id: str + :param accept_language: Specifies the preferred language for the response. 
+ :type accept_language: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + accept_language: Optional[str] = None, + **kwargs: Any + ) -> None: + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + super(StorageImportExportConfiguration, self).__init__(**kwargs) + + self.credential = credential + self.subscription_id = subscription_id + self.accept_language = accept_language + self.api_version = "2021-01-01" + self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) + kwargs.setdefault('sdk_moniker', 'mgmt-storageimportexport/{}'.format(VERSION)) + self._configure(**kwargs) + + def _configure( + self, + **kwargs: Any + ) -> None: + self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get('authentication_policy') + if self.credential and not self.authentication_policy: + self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/_storage_import_export.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/_storage_import_export.py new file mode 100644 index 000000000000..d896a7bdb757 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/_storage_import_export.py @@ -0,0 +1,99 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core import AsyncARMPipelineClient +from msrest import Deserializer, Serializer + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from azure.core.credentials_async import AsyncTokenCredential + +from ._configuration import StorageImportExportConfiguration +from .operations import LocationsOperations +from .operations import JobsOperations +from .operations import BitLockerKeysOperations +from .operations import Operations +from .. import models + + +class StorageImportExport(object): + """The Storage Import/Export Resource Provider API. 
+ + :ivar locations: LocationsOperations operations + :vartype locations: storage_import_export.aio.operations.LocationsOperations + :ivar jobs: JobsOperations operations + :vartype jobs: storage_import_export.aio.operations.JobsOperations + :ivar bit_locker_keys: BitLockerKeysOperations operations + :vartype bit_locker_keys: storage_import_export.aio.operations.BitLockerKeysOperations + :ivar operations: Operations operations + :vartype operations: storage_import_export.aio.operations.Operations + :param credential: Credential needed for the client to connect to Azure. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The subscription ID for the Azure user. + :type subscription_id: str + :param accept_language: Specifies the preferred language for the response. + :type accept_language: str + :param str base_url: Service URL + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + accept_language: Optional[str] = None, + base_url: Optional[str] = None, + **kwargs: Any + ) -> None: + if not base_url: + base_url = 'https://management.azure.com' + self._config = StorageImportExportConfiguration(credential, subscription_id, accept_language, **kwargs) + self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + self._serialize = Serializer(client_models) + self._serialize.client_side_validation = False + self._deserialize = Deserializer(client_models) + + self.locations = LocationsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.jobs = JobsOperations( + self._client, self._config, self._serialize, self._deserialize) + self.bit_locker_keys = BitLockerKeysOperations( + self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations( + self._client, self._config, self._serialize, self._deserialize) + + async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + """Runs the network request through the client's chained policies. + + :param http_request: The network request you want to make. Required. + :type http_request: ~azure.core.pipeline.transport.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + """ + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + http_request.url = self._client.format_url(http_request.url, **path_format_arguments) + stream = kwargs.pop("stream", True) + pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) + return pipeline_response.http_response + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> "StorageImportExport": + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/__init__.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/__init__.py new file mode 100644 index 000000000000..e1f9bbe57f6c --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._locations_operations import LocationsOperations +from ._jobs_operations import JobsOperations +from ._bit_locker_keys_operations import BitLockerKeysOperations +from ._operations import Operations + +__all__ = [ + 'LocationsOperations', + 'JobsOperations', + 'BitLockerKeysOperations', + 'Operations', +] diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_bit_locker_keys_operations.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_bit_locker_keys_operations.py new file mode 100644 index 000000000000..19a5fdbcd826 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_bit_locker_keys_operations.py @@ -0,0 +1,120 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... 
import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class BitLockerKeysOperations: + """BitLockerKeysOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + job_name: str, + resource_group_name: str, + **kwargs: Any + ) -> AsyncIterable["_models.GetBitLockerKeysResponse"]: + """Returns the BitLocker Keys for all drives in the specified job. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GetBitLockerKeysResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.GetBitLockerKeysResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.GetBitLockerKeysResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('GetBitLockerKeysResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = 
prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}/listBitLockerKeys'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_jobs_operations.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_jobs_operations.py new file mode 100644 index 000000000000..48cfa7354898 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_jobs_operations.py @@ -0,0 +1,479 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class JobsOperations: + """JobsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_subscription( + self, + top: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ListJobsResponse"]: + """Returns all active and completed jobs in a subscription. + + :param top: An integer value that specifies how many jobs at most should be returned. The value + cannot exceed 100. + :type top: long + :param filter: Can be used to restrict the results to certain conditions. 
+ :type filter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListJobsResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.ListJobsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListJobsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'long') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListJobsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.ImportExport/jobs'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name: str, + top: Optional[int] = None, + filter: Optional[str] = None, + **kwargs: Any + ) -> AsyncIterable["_models.ListJobsResponse"]: + """Returns all active and completed jobs in a resource group. + + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param top: An integer value that specifies how many jobs at most should be returned. The value + cannot exceed 100. + :type top: long + :param filter: Can be used to restrict the results to certain conditions. 
+ :type filter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListJobsResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.ListJobsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListJobsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'long') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListJobsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs'} # type: ignore + + async def get( + self, + job_name: str, + resource_group_name: str, + **kwargs: Any + ) -> "_models.JobResponse": + """Gets information about an existing job. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + async def update( + self, + job_name: str, + resource_group_name: str, + body: "_models.UpdateJobParameters", + **kwargs: Any + ) -> "_models.JobResponse": + """Updates specific properties of a job. You can call this operation to notify the Import/Export + service that the hard drives comprising the import or export job have been shipped to the + Microsoft data center. It can also be used to cancel an existing job. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param body: The parameters to update in the job. 
+ :type body: ~storage_import_export.models.UpdateJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'UpdateJobParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + async def create( + self, + job_name: str, + resource_group_name: str, + body: "_models.PutJobParameters", + client_tenant_id: Optional[str] = None, + **kwargs: Any + ) -> "_models.JobResponse": + """Creates a new job or updates an existing job in the specified subscription. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param body: The parameters used for creating the job. + :type body: ~storage_import_export.models.PutJobParameters + :param client_tenant_id: The tenant ID of the client making the request. 
+ :type client_tenant_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + if client_tenant_id is not None: + header_parameters['x-ms-client-tenant-id'] = self._serialize.header("client_tenant_id", client_tenant_id, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'PutJobParameters') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('JobResponse', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + async def delete( + self, + job_name: str, + resource_group_name: str, + **kwargs: Any + ) -> None: + """Deletes an existing job. Only jobs in the Creating or Completed states can be deleted. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_locations_operations.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_locations_operations.py new file mode 100644 index 000000000000..8ed3f9813cea --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_locations_operations.py @@ -0,0 +1,165 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class LocationsOperations: + """LocationsOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.LocationsResponse"]: + """Returns a list of locations to which you can ship the disks associated with an import or export + job. A location is a Microsoft data center region. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LocationsResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.LocationsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LocationsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('LocationsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + 
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.ImportExport/locations'} # type: ignore + + async def get( + self, + location_name: str, + **kwargs: Any + ) -> "_models.Location": + """Returns the details about a location to which you can ship the disks associated with an import + or export job. A location is an Azure region. + + :param location_name: The name of the location. For example, West US or westus. + :type location_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Location, or the result of cls(response) + :rtype: ~storage_import_export.models.Location + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Location"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'locationName': self._serialize.url("location_name", location_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Location', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/providers/Microsoft.ImportExport/locations/{locationName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_operations.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_operations.py new file mode 100644 index 000000000000..d5fd8a8c7c8a --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/aio/operations/_operations.py @@ -0,0 +1,107 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class Operations: + """Operations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs: Any + ) -> AsyncIterable["_models.ListOperationsResponse"]: + """Returns the list of operations supported by the import/export resource provider. 
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListOperationsResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~storage_import_export.models.ListOperationsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListOperationsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('ListOperationsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.ImportExport/operations'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/__init__.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/__init__.py new file mode 100644 index 000000000000..fc9e97de94a4 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/__init__.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import DeliveryPackageInformation + from ._models_py3 import DriveBitLockerKey + from ._models_py3 import DriveStatus + from ._models_py3 import EncryptionKeyDetails + from ._models_py3 import ErrorResponse + from ._models_py3 import ErrorResponseErrorDetailsItem + from ._models_py3 import Export + from ._models_py3 import GetBitLockerKeysResponse + from ._models_py3 import IdentityDetails + from ._models_py3 import JobDetails + from ._models_py3 import JobResponse + from ._models_py3 import ListJobsResponse + from ._models_py3 import ListOperationsResponse + from ._models_py3 import Location + from ._models_py3 import LocationsResponse + from ._models_py3 import Operation + from ._models_py3 import PackageInformation + from ._models_py3 import PutJobParameters + from ._models_py3 import ReturnAddress + from ._models_py3 import ReturnShipping + from ._models_py3 import ShippingInformation + from ._models_py3 import SystemData + from ._models_py3 import UpdateJobParameters +except (SyntaxError, ImportError): + from ._models import DeliveryPackageInformation # type: ignore + from ._models import DriveBitLockerKey # type: ignore + from ._models import DriveStatus # type: ignore + from ._models import EncryptionKeyDetails # type: ignore + from ._models import ErrorResponse # type: ignore + from ._models import ErrorResponseErrorDetailsItem # type: ignore + from ._models import Export # type: ignore + from ._models import GetBitLockerKeysResponse # type: ignore + from ._models import IdentityDetails # type: ignore + from ._models import JobDetails # type: ignore + from ._models import JobResponse # type: ignore + from ._models import ListJobsResponse # type: ignore + from ._models import ListOperationsResponse # type: ignore + from ._models import Location # type: ignore + from ._models import LocationsResponse # type: ignore + from ._models import Operation # type: ignore + from ._models import PackageInformation # type: ignore + from ._models import PutJobParameters # type: ignore + from ._models import ReturnAddress # type: ignore + from ._models import ReturnShipping # type: ignore + from ._models import ShippingInformation # type: ignore + from ._models import SystemData # type: ignore + from ._models import UpdateJobParameters # type: ignore + +from ._storage_import_export_enums import ( + CreatedByType, + DriveState, + EncryptionKekType, + IdentityType, +) + +__all__ = [ + 'DeliveryPackageInformation', + 'DriveBitLockerKey', + 'DriveStatus', + 'EncryptionKeyDetails', + 'ErrorResponse', + 'ErrorResponseErrorDetailsItem', + 'Export', + 'GetBitLockerKeysResponse', + 'IdentityDetails', + 'JobDetails', + 'JobResponse', + 'ListJobsResponse', + 'ListOperationsResponse', + 'Location', + 'LocationsResponse', + 'Operation', + 'PackageInformation', + 'PutJobParameters', + 'ReturnAddress', + 'ReturnShipping', + 'ShippingInformation', + 'SystemData', + 'UpdateJobParameters', + 'CreatedByType', + 'DriveState', + 'EncryptionKekType', + 'IdentityType', +] diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_models.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_models.py new file mode 100644 index 000000000000..85835b038804 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_models.py @@ -0,0 +1,970 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class DeliveryPackageInformation(msrest.serialization.Model): + """Contains information about the delivery package being shipped by the customer to the Microsoft data center. + + All required parameters must be populated in order to send to Azure. + + :param carrier_name: Required. The name of the carrier that is used to ship the import or + export drives. + :type carrier_name: str + :param tracking_number: Required. The tracking number of the package. + :type tracking_number: str + :param drive_count: The number of drives included in the package. + :type drive_count: long + :param ship_date: The date when the package is shipped. + :type ship_date: str + """ + + _validation = { + 'carrier_name': {'required': True}, + 'tracking_number': {'required': True}, + } + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_number': {'key': 'trackingNumber', 'type': 'str'}, + 'drive_count': {'key': 'driveCount', 'type': 'long'}, + 'ship_date': {'key': 'shipDate', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DeliveryPackageInformation, self).__init__(**kwargs) + self.carrier_name = kwargs['carrier_name'] + self.tracking_number = kwargs['tracking_number'] + self.drive_count = kwargs.get('drive_count', None) + self.ship_date = kwargs.get('ship_date', None) + + +class DriveBitLockerKey(msrest.serialization.Model): + """BitLocker recovery key or password to the specified drive. + + :param bit_locker_key: BitLocker recovery key or password. + :type bit_locker_key: str + :param drive_id: Drive ID. + :type drive_id: str + """ + + _attribute_map = { + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'drive_id': {'key': 'driveId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(DriveBitLockerKey, self).__init__(**kwargs) + self.bit_locker_key = kwargs.get('bit_locker_key', None) + self.drive_id = kwargs.get('drive_id', None) + + +class DriveStatus(msrest.serialization.Model): + """Provides information about the drive's status. + + :param drive_id: The drive's hardware serial number, without spaces. + :type drive_id: str + :param bit_locker_key: The BitLocker key used to encrypt the drive. + :type bit_locker_key: str + :param manifest_file: The relative path of the manifest file on the drive. + :type manifest_file: str + :param manifest_hash: The Base16-encoded MD5 hash of the manifest file on the drive. + :type manifest_hash: str + :param drive_header_hash: The drive header hash value. + :type drive_header_hash: str + :param state: The drive's current state. Possible values include: "Specified", "Received", + "NeverReceived", "Transferring", "Completed", "CompletedMoreInfo", "ShippedBack". + :type state: str or ~storage_import_export.models.DriveState + :param copy_status: Detailed status about the data transfer process. This field is not returned + in the response until the drive is in the Transferring state. 
+ :type copy_status: str + :param percent_complete: Percentage completed for the drive. + :type percent_complete: long + :param verbose_log_uri: A URI that points to the blob containing the verbose log for the data + transfer operation. + :type verbose_log_uri: str + :param error_log_uri: A URI that points to the blob containing the error log for the data + transfer operation. + :type error_log_uri: str + :param manifest_uri: A URI that points to the blob containing the drive manifest file. + :type manifest_uri: str + :param bytes_succeeded: Bytes successfully transferred for the drive. + :type bytes_succeeded: long + """ + + _attribute_map = { + 'drive_id': {'key': 'driveId', 'type': 'str'}, + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'manifest_file': {'key': 'manifestFile', 'type': 'str'}, + 'manifest_hash': {'key': 'manifestHash', 'type': 'str'}, + 'drive_header_hash': {'key': 'driveHeaderHash', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'copy_status': {'key': 'copyStatus', 'type': 'str'}, + 'percent_complete': {'key': 'percentComplete', 'type': 'long'}, + 'verbose_log_uri': {'key': 'verboseLogUri', 'type': 'str'}, + 'error_log_uri': {'key': 'errorLogUri', 'type': 'str'}, + 'manifest_uri': {'key': 'manifestUri', 'type': 'str'}, + 'bytes_succeeded': {'key': 'bytesSucceeded', 'type': 'long'}, + } + + def __init__( + self, + **kwargs + ): + super(DriveStatus, self).__init__(**kwargs) + self.drive_id = kwargs.get('drive_id', None) + self.bit_locker_key = kwargs.get('bit_locker_key', None) + self.manifest_file = kwargs.get('manifest_file', None) + self.manifest_hash = kwargs.get('manifest_hash', None) + self.drive_header_hash = kwargs.get('drive_header_hash', None) + self.state = kwargs.get('state', None) + self.copy_status = kwargs.get('copy_status', None) + self.percent_complete = kwargs.get('percent_complete', None) + self.verbose_log_uri = kwargs.get('verbose_log_uri', None) + self.error_log_uri = kwargs.get('error_log_uri', None) + self.manifest_uri = kwargs.get('manifest_uri', None) + self.bytes_succeeded = kwargs.get('bytes_succeeded', None) + + +class EncryptionKeyDetails(msrest.serialization.Model): + """Specifies the encryption key properties. + + :param kek_type: The type of kek encryption key. Possible values include: "MicrosoftManaged", + "CustomerManaged". Default value: "MicrosoftManaged". + :type kek_type: str or ~storage_import_export.models.EncryptionKekType + :param kek_url: Specifies the url for kek encryption key. + :type kek_url: str + :param kek_vault_resource_id: Specifies the keyvault resource id for kek encryption key. + :type kek_vault_resource_id: str + """ + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EncryptionKeyDetails, self).__init__(**kwargs) + self.kek_type = kwargs.get('kek_type', "MicrosoftManaged") + self.kek_url = kwargs.get('kek_url', None) + self.kek_vault_resource_id = kwargs.get('kek_vault_resource_id', None) + + +class ErrorResponse(msrest.serialization.Model): + """Response when errors occurred. + + :param code: Provides information about the error code. + :type code: str + :param message: Provides information about the error message. + :type message: str + :param target: Provides information about the error target. + :type target: str + :param details: Describes the error details if present. 
+ :type details: list[~storage_import_export.models.ErrorResponseErrorDetailsItem] + :param innererror: Inner error object if present. + :type innererror: any + """ + + _attribute_map = { + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[ErrorResponseErrorDetailsItem]'}, + 'innererror': {'key': 'error.innererror', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + self.details = kwargs.get('details', None) + self.innererror = kwargs.get('innererror', None) + + +class ErrorResponseErrorDetailsItem(msrest.serialization.Model): + """ErrorResponseErrorDetailsItem. + + :param code: Provides information about the error code. + :type code: str + :param target: Provides information about the error target. + :type target: str + :param message: Provides information about the error message. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorResponseErrorDetailsItem, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.target = kwargs.get('target', None) + self.message = kwargs.get('message', None) + + +class Export(msrest.serialization.Model): + """A property containing information about the blobs to be exported for an export job. This property is required for export jobs, but must not be specified for import jobs. + + :param blob_list_blob_path: The relative URI to the block blob that contains the list of blob + paths or blob path prefixes as defined above, beginning with the container name. If the blob is + in root container, the URI must begin with $root. + :type blob_list_blob_path: str + :param blob_path: A collection of blob-path strings. + :type blob_path: list[str] + :param blob_path_prefix: A collection of blob-prefix strings. + :type blob_path_prefix: list[str] + """ + + _attribute_map = { + 'blob_list_blob_path': {'key': 'blobListBlobPath', 'type': 'str'}, + 'blob_path': {'key': 'blobList.blobPath', 'type': '[str]'}, + 'blob_path_prefix': {'key': 'blobList.blobPathPrefix', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(Export, self).__init__(**kwargs) + self.blob_list_blob_path = kwargs.get('blob_list_blob_path', None) + self.blob_path = kwargs.get('blob_path', None) + self.blob_path_prefix = kwargs.get('blob_path_prefix', None) + + +class GetBitLockerKeysResponse(msrest.serialization.Model): + """GetBitLockerKeys response. + + :param value: drive status. + :type value: list[~storage_import_export.models.DriveBitLockerKey] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DriveBitLockerKey]'}, + } + + def __init__( + self, + **kwargs + ): + super(GetBitLockerKeysResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class IdentityDetails(msrest.serialization.Model): + """Specifies the identity properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param type: The type of identity. Possible values include: "None", "SystemAssigned", + "UserAssigned". Default value: "None". 
+ :type type: str or ~storage_import_export.models.IdentityType + :ivar principal_id: Specifies the principal id for the identity for the job. + :vartype principal_id: str + :ivar tenant_id: Specifies the tenant id for the identity for the job. + :vartype tenant_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IdentityDetails, self).__init__(**kwargs) + self.type = kwargs.get('type', "None") + self.principal_id = None + self.tenant_id = None + + +class JobDetails(msrest.serialization.Model): + """Specifies the job properties. + + :param storage_account_id: The resource identifier of the storage account where data will be + imported to or exported from. + :type storage_account_id: str + :param job_type: The type of job. + :type job_type: str + :param return_address: Specifies the return address information for the job. + :type return_address: ~storage_import_export.models.ReturnAddress + :param return_shipping: Specifies the return carrier and customer's account with the carrier. + :type return_shipping: ~storage_import_export.models.ReturnShipping + :param shipping_information: Contains information about the Microsoft datacenter to which the + drives should be shipped. + :type shipping_information: ~storage_import_export.models.ShippingInformation + :param delivery_package: Contains information about the package being shipped by the customer + to the Microsoft data center. + :type delivery_package: ~storage_import_export.models.DeliveryPackageInformation + :param return_package: Contains information about the package being shipped from the Microsoft + data center to the customer to return the drives. The format is the same as the deliveryPackage + property above. This property is not included if the drives have not yet been returned. + :type return_package: ~storage_import_export.models.PackageInformation + :param diagnostics_path: The virtual blob directory to which the copy logs and backups of drive + manifest files (if enabled) will be stored. + :type diagnostics_path: str + :param log_level: Default value is Error. Indicates whether error logging or verbose logging + will be enabled. + :type log_level: str + :param backup_drive_manifest: Default value is false. Indicates whether the manifest files on + the drives should be copied to block blobs. + :type backup_drive_manifest: bool + :param state: Current state of the job. + :type state: str + :param cancel_requested: Indicates whether a request has been submitted to cancel the job. + :type cancel_requested: bool + :param percent_complete: Overall percentage completed for the job. + :type percent_complete: long + :param incomplete_blob_list_uri: A blob path that points to a block blob containing a list of + blob names that were not exported due to insufficient drive space. If all blobs were exported + successfully, then this element is not included in the response. + :type incomplete_blob_list_uri: str + :param drive_list: List of up to ten drives that comprise the job. The drive list is a required + element for an import job; it is not specified for export jobs. + :type drive_list: list[~storage_import_export.models.DriveStatus] + :param export: A property containing information about the blobs to be exported for an export + job. 
This property is included for export jobs only. + :type export: ~storage_import_export.models.Export + :param provisioning_state: Specifies the provisioning state of the job. + :type provisioning_state: str + :param encryption_key: Contains information about the encryption key. + :type encryption_key: ~storage_import_export.models.EncryptionKeyDetails + """ + + _attribute_map = { + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'return_address': {'key': 'returnAddress', 'type': 'ReturnAddress'}, + 'return_shipping': {'key': 'returnShipping', 'type': 'ReturnShipping'}, + 'shipping_information': {'key': 'shippingInformation', 'type': 'ShippingInformation'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'DeliveryPackageInformation'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageInformation'}, + 'diagnostics_path': {'key': 'diagnosticsPath', 'type': 'str'}, + 'log_level': {'key': 'logLevel', 'type': 'str'}, + 'backup_drive_manifest': {'key': 'backupDriveManifest', 'type': 'bool'}, + 'state': {'key': 'state', 'type': 'str'}, + 'cancel_requested': {'key': 'cancelRequested', 'type': 'bool'}, + 'percent_complete': {'key': 'percentComplete', 'type': 'long'}, + 'incomplete_blob_list_uri': {'key': 'incompleteBlobListUri', 'type': 'str'}, + 'drive_list': {'key': 'driveList', 'type': '[DriveStatus]'}, + 'export': {'key': 'export', 'type': 'Export'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'encryption_key': {'key': 'encryptionKey', 'type': 'EncryptionKeyDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(JobDetails, self).__init__(**kwargs) + self.storage_account_id = kwargs.get('storage_account_id', None) + self.job_type = kwargs.get('job_type', None) + self.return_address = kwargs.get('return_address', None) + self.return_shipping = kwargs.get('return_shipping', None) + self.shipping_information = kwargs.get('shipping_information', None) + self.delivery_package = kwargs.get('delivery_package', None) + self.return_package = kwargs.get('return_package', None) + self.diagnostics_path = kwargs.get('diagnostics_path', None) + self.log_level = kwargs.get('log_level', None) + self.backup_drive_manifest = kwargs.get('backup_drive_manifest', None) + self.state = kwargs.get('state', None) + self.cancel_requested = kwargs.get('cancel_requested', None) + self.percent_complete = kwargs.get('percent_complete', None) + self.incomplete_blob_list_uri = kwargs.get('incomplete_blob_list_uri', None) + self.drive_list = kwargs.get('drive_list', None) + self.export = kwargs.get('export', None) + self.provisioning_state = kwargs.get('provisioning_state', None) + self.encryption_key = kwargs.get('encryption_key', None) + + +class JobResponse(msrest.serialization.Model): + """Contains the job information. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar system_data: SystemData of ImportExport Jobs. + :vartype system_data: ~storage_import_export.models.SystemData + :ivar id: Specifies the resource identifier of the job. + :vartype id: str + :ivar name: Specifies the name of the job. + :vartype name: str + :ivar type: Specifies the type of the job resource. + :vartype type: str + :param location: Specifies the Azure location where the job is created. + :type location: str + :param tags: A set of tags. Specifies the tags that are assigned to the job. + :type tags: any + :param properties: Specifies the job properties. 
+ :type properties: ~storage_import_export.models.JobDetails + :param identity: Specifies the job identity details. + :type identity: ~storage_import_export.models.IdentityDetails + """ + + _validation = { + 'system_data': {'readonly': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': 'object'}, + 'properties': {'key': 'properties', 'type': 'JobDetails'}, + 'identity': {'key': 'identity', 'type': 'IdentityDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(JobResponse, self).__init__(**kwargs) + self.system_data = None + self.id = None + self.name = None + self.type = None + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.properties = kwargs.get('properties', None) + self.identity = kwargs.get('identity', None) + + +class ListJobsResponse(msrest.serialization.Model): + """List jobs response. + + :param next_link: link to next batch of jobs. + :type next_link: str + :param value: Job list. + :type value: list[~storage_import_export.models.JobResponse] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[JobResponse]'}, + } + + def __init__( + self, + **kwargs + ): + super(ListJobsResponse, self).__init__(**kwargs) + self.next_link = kwargs.get('next_link', None) + self.value = kwargs.get('value', None) + + +class ListOperationsResponse(msrest.serialization.Model): + """List operations response. + + :param value: operations. + :type value: list[~storage_import_export.models.Operation] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + } + + def __init__( + self, + **kwargs + ): + super(ListOperationsResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class Location(msrest.serialization.Model): + """Provides information about an Azure data center location. + + :param id: Specifies the resource identifier of the location. + :type id: str + :param name: Specifies the name of the location. Use List Locations to get all supported + locations. + :type name: str + :param type: Specifies the type of the location. + :type type: str + :param recipient_name: The recipient name to use when shipping the drives to the Azure data + center. + :type recipient_name: str + :param street_address1: The first line of the street address to use when shipping the drives to + the Azure data center. + :type street_address1: str + :param street_address2: The second line of the street address to use when shipping the drives + to the Azure data center. + :type street_address2: str + :param city: The city name to use when shipping the drives to the Azure data center. + :type city: str + :param state_or_province: The state or province to use when shipping the drives to the Azure + data center. + :type state_or_province: str + :param postal_code: The postal code to use when shipping the drives to the Azure data center. + :type postal_code: str + :param country_or_region: The country or region to use when shipping the drives to the Azure + data center. + :type country_or_region: str + :param phone: The phone number for the Azure data center. 
+ :type phone: str + :param additional_shipping_information: Additional shipping information for customer, specific + to datacenter to which customer should send their disks. + :type additional_shipping_information: str + :param supported_carriers: A list of carriers that are supported at this location. + :type supported_carriers: list[str] + :param alternate_locations: A list of location IDs that should be used to ship shipping drives + to for jobs created against the current location. If the current location is active, it will be + part of the list. If it is temporarily closed due to maintenance, this list may contain other + locations. + :type alternate_locations: list[str] + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recipient_name': {'key': 'properties.recipientName', 'type': 'str'}, + 'street_address1': {'key': 'properties.streetAddress1', 'type': 'str'}, + 'street_address2': {'key': 'properties.streetAddress2', 'type': 'str'}, + 'city': {'key': 'properties.city', 'type': 'str'}, + 'state_or_province': {'key': 'properties.stateOrProvince', 'type': 'str'}, + 'postal_code': {'key': 'properties.postalCode', 'type': 'str'}, + 'country_or_region': {'key': 'properties.countryOrRegion', 'type': 'str'}, + 'phone': {'key': 'properties.phone', 'type': 'str'}, + 'additional_shipping_information': {'key': 'properties.additionalShippingInformation', 'type': 'str'}, + 'supported_carriers': {'key': 'properties.supportedCarriers', 'type': '[str]'}, + 'alternate_locations': {'key': 'properties.alternateLocations', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(Location, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + self.recipient_name = kwargs.get('recipient_name', None) + self.street_address1 = kwargs.get('street_address1', None) + self.street_address2 = kwargs.get('street_address2', None) + self.city = kwargs.get('city', None) + self.state_or_province = kwargs.get('state_or_province', None) + self.postal_code = kwargs.get('postal_code', None) + self.country_or_region = kwargs.get('country_or_region', None) + self.phone = kwargs.get('phone', None) + self.additional_shipping_information = kwargs.get('additional_shipping_information', None) + self.supported_carriers = kwargs.get('supported_carriers', None) + self.alternate_locations = kwargs.get('alternate_locations', None) + + +class LocationsResponse(msrest.serialization.Model): + """Locations response. + + :param value: locations. + :type value: list[~storage_import_export.models.Location] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Location]'}, + } + + def __init__( + self, + **kwargs + ): + super(LocationsResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + +class Operation(msrest.serialization.Model): + """Describes a supported operation by the Storage Import/Export job API. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of the operation. + :type name: str + :param provider: The resource provider name to which the operation belongs. + :type provider: str + :param resource: The name of the resource to which the operation belongs. + :type resource: str + :param operation: The display name of the operation. + :type operation: str + :param description: Short description of the operation. 
+ :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'provider': {'key': 'display.provider', 'type': 'str'}, + 'resource': {'key': 'display.resource', 'type': 'str'}, + 'operation': {'key': 'display.operation', 'type': 'str'}, + 'description': {'key': 'display.description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = kwargs['name'] + self.provider = kwargs.get('provider', None) + self.resource = kwargs.get('resource', None) + self.operation = kwargs.get('operation', None) + self.description = kwargs.get('description', None) + + +class PackageInformation(msrest.serialization.Model): + """Contains information about the package being shipped by the customer to the Microsoft data center. + + All required parameters must be populated in order to send to Azure. + + :param carrier_name: Required. The name of the carrier that is used to ship the import or + export drives. + :type carrier_name: str + :param tracking_number: Required. The tracking number of the package. + :type tracking_number: str + :param drive_count: Required. The number of drives included in the package. + :type drive_count: long + :param ship_date: Required. The date when the package is shipped. + :type ship_date: str + """ + + _validation = { + 'carrier_name': {'required': True}, + 'tracking_number': {'required': True}, + 'drive_count': {'required': True}, + 'ship_date': {'required': True}, + } + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_number': {'key': 'trackingNumber', 'type': 'str'}, + 'drive_count': {'key': 'driveCount', 'type': 'long'}, + 'ship_date': {'key': 'shipDate', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(PackageInformation, self).__init__(**kwargs) + self.carrier_name = kwargs['carrier_name'] + self.tracking_number = kwargs['tracking_number'] + self.drive_count = kwargs['drive_count'] + self.ship_date = kwargs['ship_date'] + + +class PutJobParameters(msrest.serialization.Model): + """Put Job parameters. + + :param location: Specifies the supported Azure location where the job should be created. + :type location: str + :param tags: A set of tags. Specifies the tags that will be assigned to the job. + :type tags: any + :param properties: Specifies the job properties. + :type properties: ~storage_import_export.models.JobDetails + """ + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': 'object'}, + 'properties': {'key': 'properties', 'type': 'JobDetails'}, + } + + def __init__( + self, + **kwargs + ): + super(PutJobParameters, self).__init__(**kwargs) + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.properties = kwargs.get('properties', None) + + +class ReturnAddress(msrest.serialization.Model): + """Specifies the return address information for the job. + + All required parameters must be populated in order to send to Azure. + + :param recipient_name: Required. The name of the recipient who will receive the hard drives + when they are returned. + :type recipient_name: str + :param street_address1: Required. The first line of the street address to use when returning + the drives. + :type street_address1: str + :param street_address2: The second line of the street address to use when returning the drives. + :type street_address2: str + :param city: Required. 
The city name to use when returning the drives. + :type city: str + :param state_or_province: The state or province to use when returning the drives. + :type state_or_province: str + :param postal_code: Required. The postal code to use when returning the drives. + :type postal_code: str + :param country_or_region: Required. The country or region to use when returning the drives. + :type country_or_region: str + :param phone: Required. Phone number of the recipient of the returned drives. + :type phone: str + :param email: Required. Email address of the recipient of the returned drives. + :type email: str + """ + + _validation = { + 'recipient_name': {'required': True}, + 'street_address1': {'required': True}, + 'city': {'required': True}, + 'postal_code': {'required': True}, + 'country_or_region': {'required': True}, + 'phone': {'required': True}, + 'email': {'required': True}, + } + + _attribute_map = { + 'recipient_name': {'key': 'recipientName', 'type': 'str'}, + 'street_address1': {'key': 'streetAddress1', 'type': 'str'}, + 'street_address2': {'key': 'streetAddress2', 'type': 'str'}, + 'city': {'key': 'city', 'type': 'str'}, + 'state_or_province': {'key': 'stateOrProvince', 'type': 'str'}, + 'postal_code': {'key': 'postalCode', 'type': 'str'}, + 'country_or_region': {'key': 'countryOrRegion', 'type': 'str'}, + 'phone': {'key': 'phone', 'type': 'str'}, + 'email': {'key': 'email', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ReturnAddress, self).__init__(**kwargs) + self.recipient_name = kwargs['recipient_name'] + self.street_address1 = kwargs['street_address1'] + self.street_address2 = kwargs.get('street_address2', None) + self.city = kwargs['city'] + self.state_or_province = kwargs.get('state_or_province', None) + self.postal_code = kwargs['postal_code'] + self.country_or_region = kwargs['country_or_region'] + self.phone = kwargs['phone'] + self.email = kwargs['email'] + + +class ReturnShipping(msrest.serialization.Model): + """Specifies the return carrier and customer's account with the carrier. + + All required parameters must be populated in order to send to Azure. + + :param carrier_name: Required. The carrier's name. + :type carrier_name: str + :param carrier_account_number: Required. The customer's account number with the carrier. + :type carrier_account_number: str + """ + + _validation = { + 'carrier_name': {'required': True}, + 'carrier_account_number': {'required': True}, + } + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'carrier_account_number': {'key': 'carrierAccountNumber', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ReturnShipping, self).__init__(**kwargs) + self.carrier_name = kwargs['carrier_name'] + self.carrier_account_number = kwargs['carrier_account_number'] + + +class ShippingInformation(msrest.serialization.Model): + """Contains information about the Microsoft datacenter to which the drives should be shipped. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param recipient_name: The name of the recipient who will receive the hard drives when they are + returned. + :type recipient_name: str + :param street_address1: The first line of the street address to use when returning the drives. + :type street_address1: str + :param street_address2: The second line of the street address to use when returning the drives. + :type street_address2: str + :param city: The city name to use when returning the drives. 
+ :type city: str + :param state_or_province: The state or province to use when returning the drives. + :type state_or_province: str + :param postal_code: The postal code to use when returning the drives. + :type postal_code: str + :param country_or_region: The country or region to use when returning the drives. + :type country_or_region: str + :param phone: Phone number of the recipient of the returned drives. + :type phone: str + :ivar additional_information: Additional shipping information for customer, specific to + datacenter to which customer should send their disks. + :vartype additional_information: str + """ + + _validation = { + 'additional_information': {'readonly': True}, + } + + _attribute_map = { + 'recipient_name': {'key': 'recipientName', 'type': 'str'}, + 'street_address1': {'key': 'streetAddress1', 'type': 'str'}, + 'street_address2': {'key': 'streetAddress2', 'type': 'str'}, + 'city': {'key': 'city', 'type': 'str'}, + 'state_or_province': {'key': 'stateOrProvince', 'type': 'str'}, + 'postal_code': {'key': 'postalCode', 'type': 'str'}, + 'country_or_region': {'key': 'countryOrRegion', 'type': 'str'}, + 'phone': {'key': 'phone', 'type': 'str'}, + 'additional_information': {'key': 'additionalInformation', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ShippingInformation, self).__init__(**kwargs) + self.recipient_name = kwargs.get('recipient_name', None) + self.street_address1 = kwargs.get('street_address1', None) + self.street_address2 = kwargs.get('street_address2', None) + self.city = kwargs.get('city', None) + self.state_or_province = kwargs.get('state_or_province', None) + self.postal_code = kwargs.get('postal_code', None) + self.country_or_region = kwargs.get('country_or_region', None) + self.phone = kwargs.get('phone', None) + self.additional_information = None + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~storage_import_export.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~storage_import_export.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC). 
+ :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = kwargs.get('created_by', None) + self.created_by_type = kwargs.get('created_by_type', None) + self.created_at = kwargs.get('created_at', None) + self.last_modified_by = kwargs.get('last_modified_by', None) + self.last_modified_by_type = kwargs.get('last_modified_by_type', None) + self.last_modified_at = kwargs.get('last_modified_at', None) + + +class UpdateJobParameters(msrest.serialization.Model): + """Update Job parameters. + + :param tags: A set of tags. Specifies the tags that will be assigned to the job. + :type tags: any + :param cancel_requested: If specified, the value must be true. The service will attempt to + cancel the job. + :type cancel_requested: bool + :param state: If specified, the value must be Shipping, which tells the Import/Export service + that the package for the job has been shipped. The ReturnAddress and DeliveryPackage properties + must have been set either in this request or in a previous request, otherwise the request will + fail. + :type state: str + :param return_address: Specifies the return address information for the job. + :type return_address: ~storage_import_export.models.ReturnAddress + :param return_shipping: Specifies the return carrier and customer's account with the carrier. + :type return_shipping: ~storage_import_export.models.ReturnShipping + :param delivery_package: Contains information about the package being shipped by the customer + to the Microsoft data center. + :type delivery_package: ~storage_import_export.models.DeliveryPackageInformation + :param log_level: Indicates whether error logging or verbose logging is enabled. + :type log_level: str + :param backup_drive_manifest: Indicates whether the manifest files on the drives should be + copied to block blobs. + :type backup_drive_manifest: bool + :param drive_list: List of drives that comprise the job. 
+ :type drive_list: list[~storage_import_export.models.DriveStatus] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': 'object'}, + 'cancel_requested': {'key': 'properties.cancelRequested', 'type': 'bool'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'return_address': {'key': 'properties.returnAddress', 'type': 'ReturnAddress'}, + 'return_shipping': {'key': 'properties.returnShipping', 'type': 'ReturnShipping'}, + 'delivery_package': {'key': 'properties.deliveryPackage', 'type': 'DeliveryPackageInformation'}, + 'log_level': {'key': 'properties.logLevel', 'type': 'str'}, + 'backup_drive_manifest': {'key': 'properties.backupDriveManifest', 'type': 'bool'}, + 'drive_list': {'key': 'properties.driveList', 'type': '[DriveStatus]'}, + } + + def __init__( + self, + **kwargs + ): + super(UpdateJobParameters, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.cancel_requested = kwargs.get('cancel_requested', None) + self.state = kwargs.get('state', None) + self.return_address = kwargs.get('return_address', None) + self.return_shipping = kwargs.get('return_shipping', None) + self.delivery_package = kwargs.get('delivery_package', None) + self.log_level = kwargs.get('log_level', None) + self.backup_drive_manifest = kwargs.get('backup_drive_manifest', None) + self.drive_list = kwargs.get('drive_list', None) diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_models_py3.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_models_py3.py new file mode 100644 index 000000000000..2ddeb31c5fcd --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_models_py3.py @@ -0,0 +1,1118 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +import datetime +from typing import Any, List, Optional, Union + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + +from ._storage_import_export_enums import * + + +class DeliveryPackageInformation(msrest.serialization.Model): + """Contains information about the delivery package being shipped by the customer to the Microsoft data center. + + All required parameters must be populated in order to send to Azure. + + :param carrier_name: Required. The name of the carrier that is used to ship the import or + export drives. + :type carrier_name: str + :param tracking_number: Required. The tracking number of the package. + :type tracking_number: str + :param drive_count: The number of drives included in the package. + :type drive_count: long + :param ship_date: The date when the package is shipped. 
+ :type ship_date: str + """ + + _validation = { + 'carrier_name': {'required': True}, + 'tracking_number': {'required': True}, + } + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_number': {'key': 'trackingNumber', 'type': 'str'}, + 'drive_count': {'key': 'driveCount', 'type': 'long'}, + 'ship_date': {'key': 'shipDate', 'type': 'str'}, + } + + def __init__( + self, + *, + carrier_name: str, + tracking_number: str, + drive_count: Optional[int] = None, + ship_date: Optional[str] = None, + **kwargs + ): + super(DeliveryPackageInformation, self).__init__(**kwargs) + self.carrier_name = carrier_name + self.tracking_number = tracking_number + self.drive_count = drive_count + self.ship_date = ship_date + + +class DriveBitLockerKey(msrest.serialization.Model): + """BitLocker recovery key or password to the specified drive. + + :param bit_locker_key: BitLocker recovery key or password. + :type bit_locker_key: str + :param drive_id: Drive ID. + :type drive_id: str + """ + + _attribute_map = { + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'drive_id': {'key': 'driveId', 'type': 'str'}, + } + + def __init__( + self, + *, + bit_locker_key: Optional[str] = None, + drive_id: Optional[str] = None, + **kwargs + ): + super(DriveBitLockerKey, self).__init__(**kwargs) + self.bit_locker_key = bit_locker_key + self.drive_id = drive_id + + +class DriveStatus(msrest.serialization.Model): + """Provides information about the drive's status. + + :param drive_id: The drive's hardware serial number, without spaces. + :type drive_id: str + :param bit_locker_key: The BitLocker key used to encrypt the drive. + :type bit_locker_key: str + :param manifest_file: The relative path of the manifest file on the drive. + :type manifest_file: str + :param manifest_hash: The Base16-encoded MD5 hash of the manifest file on the drive. + :type manifest_hash: str + :param drive_header_hash: The drive header hash value. + :type drive_header_hash: str + :param state: The drive's current state. Possible values include: "Specified", "Received", + "NeverReceived", "Transferring", "Completed", "CompletedMoreInfo", "ShippedBack". + :type state: str or ~storage_import_export.models.DriveState + :param copy_status: Detailed status about the data transfer process. This field is not returned + in the response until the drive is in the Transferring state. + :type copy_status: str + :param percent_complete: Percentage completed for the drive. + :type percent_complete: long + :param verbose_log_uri: A URI that points to the blob containing the verbose log for the data + transfer operation. + :type verbose_log_uri: str + :param error_log_uri: A URI that points to the blob containing the error log for the data + transfer operation. + :type error_log_uri: str + :param manifest_uri: A URI that points to the blob containing the drive manifest file. + :type manifest_uri: str + :param bytes_succeeded: Bytes successfully transferred for the drive. 
+ :type bytes_succeeded: long + """ + + _attribute_map = { + 'drive_id': {'key': 'driveId', 'type': 'str'}, + 'bit_locker_key': {'key': 'bitLockerKey', 'type': 'str'}, + 'manifest_file': {'key': 'manifestFile', 'type': 'str'}, + 'manifest_hash': {'key': 'manifestHash', 'type': 'str'}, + 'drive_header_hash': {'key': 'driveHeaderHash', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'copy_status': {'key': 'copyStatus', 'type': 'str'}, + 'percent_complete': {'key': 'percentComplete', 'type': 'long'}, + 'verbose_log_uri': {'key': 'verboseLogUri', 'type': 'str'}, + 'error_log_uri': {'key': 'errorLogUri', 'type': 'str'}, + 'manifest_uri': {'key': 'manifestUri', 'type': 'str'}, + 'bytes_succeeded': {'key': 'bytesSucceeded', 'type': 'long'}, + } + + def __init__( + self, + *, + drive_id: Optional[str] = None, + bit_locker_key: Optional[str] = None, + manifest_file: Optional[str] = None, + manifest_hash: Optional[str] = None, + drive_header_hash: Optional[str] = None, + state: Optional[Union[str, "DriveState"]] = None, + copy_status: Optional[str] = None, + percent_complete: Optional[int] = None, + verbose_log_uri: Optional[str] = None, + error_log_uri: Optional[str] = None, + manifest_uri: Optional[str] = None, + bytes_succeeded: Optional[int] = None, + **kwargs + ): + super(DriveStatus, self).__init__(**kwargs) + self.drive_id = drive_id + self.bit_locker_key = bit_locker_key + self.manifest_file = manifest_file + self.manifest_hash = manifest_hash + self.drive_header_hash = drive_header_hash + self.state = state + self.copy_status = copy_status + self.percent_complete = percent_complete + self.verbose_log_uri = verbose_log_uri + self.error_log_uri = error_log_uri + self.manifest_uri = manifest_uri + self.bytes_succeeded = bytes_succeeded + + +class EncryptionKeyDetails(msrest.serialization.Model): + """Specifies the encryption key properties. + + :param kek_type: The type of kek encryption key. Possible values include: "MicrosoftManaged", + "CustomerManaged". Default value: "MicrosoftManaged". + :type kek_type: str or ~storage_import_export.models.EncryptionKekType + :param kek_url: Specifies the url for kek encryption key. + :type kek_url: str + :param kek_vault_resource_id: Specifies the keyvault resource id for kek encryption key. + :type kek_vault_resource_id: str + """ + + _attribute_map = { + 'kek_type': {'key': 'kekType', 'type': 'str'}, + 'kek_url': {'key': 'kekUrl', 'type': 'str'}, + 'kek_vault_resource_id': {'key': 'kekVaultResourceID', 'type': 'str'}, + } + + def __init__( + self, + *, + kek_type: Optional[Union[str, "EncryptionKekType"]] = "MicrosoftManaged", + kek_url: Optional[str] = None, + kek_vault_resource_id: Optional[str] = None, + **kwargs + ): + super(EncryptionKeyDetails, self).__init__(**kwargs) + self.kek_type = kek_type + self.kek_url = kek_url + self.kek_vault_resource_id = kek_vault_resource_id + + +class ErrorResponse(msrest.serialization.Model): + """Response when errors occurred. + + :param code: Provides information about the error code. + :type code: str + :param message: Provides information about the error message. + :type message: str + :param target: Provides information about the error target. + :type target: str + :param details: Describes the error details if present. + :type details: list[~storage_import_export.models.ErrorResponseErrorDetailsItem] + :param innererror: Inner error object if present. 
+ :type innererror: any + """ + + _attribute_map = { + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[ErrorResponseErrorDetailsItem]'}, + 'innererror': {'key': 'error.innererror', 'type': 'object'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + message: Optional[str] = None, + target: Optional[str] = None, + details: Optional[List["ErrorResponseErrorDetailsItem"]] = None, + innererror: Optional[Any] = None, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + self.innererror = innererror + + +class ErrorResponseErrorDetailsItem(msrest.serialization.Model): + """ErrorResponseErrorDetailsItem. + + :param code: Provides information about the error code. + :type code: str + :param target: Provides information about the error target. + :type target: str + :param message: Provides information about the error message. + :type message: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + target: Optional[str] = None, + message: Optional[str] = None, + **kwargs + ): + super(ErrorResponseErrorDetailsItem, self).__init__(**kwargs) + self.code = code + self.target = target + self.message = message + + +class Export(msrest.serialization.Model): + """A property containing information about the blobs to be exported for an export job. This property is required for export jobs, but must not be specified for import jobs. + + :param blob_list_blob_path: The relative URI to the block blob that contains the list of blob + paths or blob path prefixes as defined above, beginning with the container name. If the blob is + in root container, the URI must begin with $root. + :type blob_list_blob_path: str + :param blob_path: A collection of blob-path strings. + :type blob_path: list[str] + :param blob_path_prefix: A collection of blob-prefix strings. + :type blob_path_prefix: list[str] + """ + + _attribute_map = { + 'blob_list_blob_path': {'key': 'blobListBlobPath', 'type': 'str'}, + 'blob_path': {'key': 'blobList.blobPath', 'type': '[str]'}, + 'blob_path_prefix': {'key': 'blobList.blobPathPrefix', 'type': '[str]'}, + } + + def __init__( + self, + *, + blob_list_blob_path: Optional[str] = None, + blob_path: Optional[List[str]] = None, + blob_path_prefix: Optional[List[str]] = None, + **kwargs + ): + super(Export, self).__init__(**kwargs) + self.blob_list_blob_path = blob_list_blob_path + self.blob_path = blob_path + self.blob_path_prefix = blob_path_prefix + + +class GetBitLockerKeysResponse(msrest.serialization.Model): + """GetBitLockerKeys response. + + :param value: drive status. + :type value: list[~storage_import_export.models.DriveBitLockerKey] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[DriveBitLockerKey]'}, + } + + def __init__( + self, + *, + value: Optional[List["DriveBitLockerKey"]] = None, + **kwargs + ): + super(GetBitLockerKeysResponse, self).__init__(**kwargs) + self.value = value + + +class IdentityDetails(msrest.serialization.Model): + """Specifies the identity properties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param type: The type of identity. 
Possible values include: "None", "SystemAssigned", + "UserAssigned". Default value: "None". + :type type: str or ~storage_import_export.models.IdentityType + :ivar principal_id: Specifies the principal id for the identity for the job. + :vartype principal_id: str + :ivar tenant_id: Specifies the tenant id for the identity for the job. + :vartype tenant_id: str + """ + + _validation = { + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "IdentityType"]] = "None", + **kwargs + ): + super(IdentityDetails, self).__init__(**kwargs) + self.type = type + self.principal_id = None + self.tenant_id = None + + +class JobDetails(msrest.serialization.Model): + """Specifies the job properties. + + :param storage_account_id: The resource identifier of the storage account where data will be + imported to or exported from. + :type storage_account_id: str + :param job_type: The type of job. + :type job_type: str + :param return_address: Specifies the return address information for the job. + :type return_address: ~storage_import_export.models.ReturnAddress + :param return_shipping: Specifies the return carrier and customer's account with the carrier. + :type return_shipping: ~storage_import_export.models.ReturnShipping + :param shipping_information: Contains information about the Microsoft datacenter to which the + drives should be shipped. + :type shipping_information: ~storage_import_export.models.ShippingInformation + :param delivery_package: Contains information about the package being shipped by the customer + to the Microsoft data center. + :type delivery_package: ~storage_import_export.models.DeliveryPackageInformation + :param return_package: Contains information about the package being shipped from the Microsoft + data center to the customer to return the drives. The format is the same as the deliveryPackage + property above. This property is not included if the drives have not yet been returned. + :type return_package: ~storage_import_export.models.PackageInformation + :param diagnostics_path: The virtual blob directory to which the copy logs and backups of drive + manifest files (if enabled) will be stored. + :type diagnostics_path: str + :param log_level: Default value is Error. Indicates whether error logging or verbose logging + will be enabled. + :type log_level: str + :param backup_drive_manifest: Default value is false. Indicates whether the manifest files on + the drives should be copied to block blobs. + :type backup_drive_manifest: bool + :param state: Current state of the job. + :type state: str + :param cancel_requested: Indicates whether a request has been submitted to cancel the job. + :type cancel_requested: bool + :param percent_complete: Overall percentage completed for the job. + :type percent_complete: long + :param incomplete_blob_list_uri: A blob path that points to a block blob containing a list of + blob names that were not exported due to insufficient drive space. If all blobs were exported + successfully, then this element is not included in the response. + :type incomplete_blob_list_uri: str + :param drive_list: List of up to ten drives that comprise the job. The drive list is a required + element for an import job; it is not specified for export jobs. 
+ :type drive_list: list[~storage_import_export.models.DriveStatus] + :param export: A property containing information about the blobs to be exported for an export + job. This property is included for export jobs only. + :type export: ~storage_import_export.models.Export + :param provisioning_state: Specifies the provisioning state of the job. + :type provisioning_state: str + :param encryption_key: Contains information about the encryption key. + :type encryption_key: ~storage_import_export.models.EncryptionKeyDetails + """ + + _attribute_map = { + 'storage_account_id': {'key': 'storageAccountId', 'type': 'str'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'return_address': {'key': 'returnAddress', 'type': 'ReturnAddress'}, + 'return_shipping': {'key': 'returnShipping', 'type': 'ReturnShipping'}, + 'shipping_information': {'key': 'shippingInformation', 'type': 'ShippingInformation'}, + 'delivery_package': {'key': 'deliveryPackage', 'type': 'DeliveryPackageInformation'}, + 'return_package': {'key': 'returnPackage', 'type': 'PackageInformation'}, + 'diagnostics_path': {'key': 'diagnosticsPath', 'type': 'str'}, + 'log_level': {'key': 'logLevel', 'type': 'str'}, + 'backup_drive_manifest': {'key': 'backupDriveManifest', 'type': 'bool'}, + 'state': {'key': 'state', 'type': 'str'}, + 'cancel_requested': {'key': 'cancelRequested', 'type': 'bool'}, + 'percent_complete': {'key': 'percentComplete', 'type': 'long'}, + 'incomplete_blob_list_uri': {'key': 'incompleteBlobListUri', 'type': 'str'}, + 'drive_list': {'key': 'driveList', 'type': '[DriveStatus]'}, + 'export': {'key': 'export', 'type': 'Export'}, + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'encryption_key': {'key': 'encryptionKey', 'type': 'EncryptionKeyDetails'}, + } + + def __init__( + self, + *, + storage_account_id: Optional[str] = None, + job_type: Optional[str] = None, + return_address: Optional["ReturnAddress"] = None, + return_shipping: Optional["ReturnShipping"] = None, + shipping_information: Optional["ShippingInformation"] = None, + delivery_package: Optional["DeliveryPackageInformation"] = None, + return_package: Optional["PackageInformation"] = None, + diagnostics_path: Optional[str] = None, + log_level: Optional[str] = None, + backup_drive_manifest: Optional[bool] = None, + state: Optional[str] = None, + cancel_requested: Optional[bool] = None, + percent_complete: Optional[int] = None, + incomplete_blob_list_uri: Optional[str] = None, + drive_list: Optional[List["DriveStatus"]] = None, + export: Optional["Export"] = None, + provisioning_state: Optional[str] = None, + encryption_key: Optional["EncryptionKeyDetails"] = None, + **kwargs + ): + super(JobDetails, self).__init__(**kwargs) + self.storage_account_id = storage_account_id + self.job_type = job_type + self.return_address = return_address + self.return_shipping = return_shipping + self.shipping_information = shipping_information + self.delivery_package = delivery_package + self.return_package = return_package + self.diagnostics_path = diagnostics_path + self.log_level = log_level + self.backup_drive_manifest = backup_drive_manifest + self.state = state + self.cancel_requested = cancel_requested + self.percent_complete = percent_complete + self.incomplete_blob_list_uri = incomplete_blob_list_uri + self.drive_list = drive_list + self.export = export + self.provisioning_state = provisioning_state + self.encryption_key = encryption_key + + +class JobResponse(msrest.serialization.Model): + """Contains the job information. 
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar system_data: SystemData of ImportExport Jobs. + :vartype system_data: ~storage_import_export.models.SystemData + :ivar id: Specifies the resource identifier of the job. + :vartype id: str + :ivar name: Specifies the name of the job. + :vartype name: str + :ivar type: Specifies the type of the job resource. + :vartype type: str + :param location: Specifies the Azure location where the job is created. + :type location: str + :param tags: A set of tags. Specifies the tags that are assigned to the job. + :type tags: any + :param properties: Specifies the job properties. + :type properties: ~storage_import_export.models.JobDetails + :param identity: Specifies the job identity details. + :type identity: ~storage_import_export.models.IdentityDetails + """ + + _validation = { + 'system_data': {'readonly': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + } + + _attribute_map = { + 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': 'object'}, + 'properties': {'key': 'properties', 'type': 'JobDetails'}, + 'identity': {'key': 'identity', 'type': 'IdentityDetails'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Any] = None, + properties: Optional["JobDetails"] = None, + identity: Optional["IdentityDetails"] = None, + **kwargs + ): + super(JobResponse, self).__init__(**kwargs) + self.system_data = None + self.id = None + self.name = None + self.type = None + self.location = location + self.tags = tags + self.properties = properties + self.identity = identity + + +class ListJobsResponse(msrest.serialization.Model): + """List jobs response. + + :param next_link: link to next batch of jobs. + :type next_link: str + :param value: Job list. + :type value: list[~storage_import_export.models.JobResponse] + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[JobResponse]'}, + } + + def __init__( + self, + *, + next_link: Optional[str] = None, + value: Optional[List["JobResponse"]] = None, + **kwargs + ): + super(ListJobsResponse, self).__init__(**kwargs) + self.next_link = next_link + self.value = value + + +class ListOperationsResponse(msrest.serialization.Model): + """List operations response. + + :param value: operations. + :type value: list[~storage_import_export.models.Operation] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Operation]'}, + } + + def __init__( + self, + *, + value: Optional[List["Operation"]] = None, + **kwargs + ): + super(ListOperationsResponse, self).__init__(**kwargs) + self.value = value + + +class Location(msrest.serialization.Model): + """Provides information about an Azure data center location. + + :param id: Specifies the resource identifier of the location. + :type id: str + :param name: Specifies the name of the location. Use List Locations to get all supported + locations. + :type name: str + :param type: Specifies the type of the location. + :type type: str + :param recipient_name: The recipient name to use when shipping the drives to the Azure data + center. 
+ :type recipient_name: str + :param street_address1: The first line of the street address to use when shipping the drives to + the Azure data center. + :type street_address1: str + :param street_address2: The second line of the street address to use when shipping the drives + to the Azure data center. + :type street_address2: str + :param city: The city name to use when shipping the drives to the Azure data center. + :type city: str + :param state_or_province: The state or province to use when shipping the drives to the Azure + data center. + :type state_or_province: str + :param postal_code: The postal code to use when shipping the drives to the Azure data center. + :type postal_code: str + :param country_or_region: The country or region to use when shipping the drives to the Azure + data center. + :type country_or_region: str + :param phone: The phone number for the Azure data center. + :type phone: str + :param additional_shipping_information: Additional shipping information for customer, specific + to datacenter to which customer should send their disks. + :type additional_shipping_information: str + :param supported_carriers: A list of carriers that are supported at this location. + :type supported_carriers: list[str] + :param alternate_locations: A list of location IDs that should be used to ship shipping drives + to for jobs created against the current location. If the current location is active, it will be + part of the list. If it is temporarily closed due to maintenance, this list may contain other + locations. + :type alternate_locations: list[str] + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recipient_name': {'key': 'properties.recipientName', 'type': 'str'}, + 'street_address1': {'key': 'properties.streetAddress1', 'type': 'str'}, + 'street_address2': {'key': 'properties.streetAddress2', 'type': 'str'}, + 'city': {'key': 'properties.city', 'type': 'str'}, + 'state_or_province': {'key': 'properties.stateOrProvince', 'type': 'str'}, + 'postal_code': {'key': 'properties.postalCode', 'type': 'str'}, + 'country_or_region': {'key': 'properties.countryOrRegion', 'type': 'str'}, + 'phone': {'key': 'properties.phone', 'type': 'str'}, + 'additional_shipping_information': {'key': 'properties.additionalShippingInformation', 'type': 'str'}, + 'supported_carriers': {'key': 'properties.supportedCarriers', 'type': '[str]'}, + 'alternate_locations': {'key': 'properties.alternateLocations', 'type': '[str]'}, + } + + def __init__( + self, + *, + id: Optional[str] = None, + name: Optional[str] = None, + type: Optional[str] = None, + recipient_name: Optional[str] = None, + street_address1: Optional[str] = None, + street_address2: Optional[str] = None, + city: Optional[str] = None, + state_or_province: Optional[str] = None, + postal_code: Optional[str] = None, + country_or_region: Optional[str] = None, + phone: Optional[str] = None, + additional_shipping_information: Optional[str] = None, + supported_carriers: Optional[List[str]] = None, + alternate_locations: Optional[List[str]] = None, + **kwargs + ): + super(Location, self).__init__(**kwargs) + self.id = id + self.name = name + self.type = type + self.recipient_name = recipient_name + self.street_address1 = street_address1 + self.street_address2 = street_address2 + self.city = city + self.state_or_province = state_or_province + self.postal_code = postal_code + self.country_or_region = country_or_region + self.phone = phone + 
self.additional_shipping_information = additional_shipping_information + self.supported_carriers = supported_carriers + self.alternate_locations = alternate_locations + + +class LocationsResponse(msrest.serialization.Model): + """Locations response. + + :param value: locations. + :type value: list[~storage_import_export.models.Location] + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Location]'}, + } + + def __init__( + self, + *, + value: Optional[List["Location"]] = None, + **kwargs + ): + super(LocationsResponse, self).__init__(**kwargs) + self.value = value + + +class Operation(msrest.serialization.Model): + """Describes a supported operation by the Storage Import/Export job API. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. Name of the operation. + :type name: str + :param provider: The resource provider name to which the operation belongs. + :type provider: str + :param resource: The name of the resource to which the operation belongs. + :type resource: str + :param operation: The display name of the operation. + :type operation: str + :param description: Short description of the operation. + :type description: str + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'provider': {'key': 'display.provider', 'type': 'str'}, + 'resource': {'key': 'display.resource', 'type': 'str'}, + 'operation': {'key': 'display.operation', 'type': 'str'}, + 'description': {'key': 'display.description', 'type': 'str'}, + } + + def __init__( + self, + *, + name: str, + provider: Optional[str] = None, + resource: Optional[str] = None, + operation: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(Operation, self).__init__(**kwargs) + self.name = name + self.provider = provider + self.resource = resource + self.operation = operation + self.description = description + + +class PackageInformation(msrest.serialization.Model): + """Contains information about the package being shipped by the customer to the Microsoft data center. + + All required parameters must be populated in order to send to Azure. + + :param carrier_name: Required. The name of the carrier that is used to ship the import or + export drives. + :type carrier_name: str + :param tracking_number: Required. The tracking number of the package. + :type tracking_number: str + :param drive_count: Required. The number of drives included in the package. + :type drive_count: long + :param ship_date: Required. The date when the package is shipped. + :type ship_date: str + """ + + _validation = { + 'carrier_name': {'required': True}, + 'tracking_number': {'required': True}, + 'drive_count': {'required': True}, + 'ship_date': {'required': True}, + } + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'tracking_number': {'key': 'trackingNumber', 'type': 'str'}, + 'drive_count': {'key': 'driveCount', 'type': 'long'}, + 'ship_date': {'key': 'shipDate', 'type': 'str'}, + } + + def __init__( + self, + *, + carrier_name: str, + tracking_number: str, + drive_count: int, + ship_date: str, + **kwargs + ): + super(PackageInformation, self).__init__(**kwargs) + self.carrier_name = carrier_name + self.tracking_number = tracking_number + self.drive_count = drive_count + self.ship_date = ship_date + + +class PutJobParameters(msrest.serialization.Model): + """Put Job parameters. 
+ + :param location: Specifies the supported Azure location where the job should be created. + :type location: str + :param tags: A set of tags. Specifies the tags that will be assigned to the job. + :type tags: any + :param properties: Specifies the job properties. + :type properties: ~storage_import_export.models.JobDetails + """ + + _attribute_map = { + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': 'object'}, + 'properties': {'key': 'properties', 'type': 'JobDetails'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Any] = None, + properties: Optional["JobDetails"] = None, + **kwargs + ): + super(PutJobParameters, self).__init__(**kwargs) + self.location = location + self.tags = tags + self.properties = properties + + +class ReturnAddress(msrest.serialization.Model): + """Specifies the return address information for the job. + + All required parameters must be populated in order to send to Azure. + + :param recipient_name: Required. The name of the recipient who will receive the hard drives + when they are returned. + :type recipient_name: str + :param street_address1: Required. The first line of the street address to use when returning + the drives. + :type street_address1: str + :param street_address2: The second line of the street address to use when returning the drives. + :type street_address2: str + :param city: Required. The city name to use when returning the drives. + :type city: str + :param state_or_province: The state or province to use when returning the drives. + :type state_or_province: str + :param postal_code: Required. The postal code to use when returning the drives. + :type postal_code: str + :param country_or_region: Required. The country or region to use when returning the drives. + :type country_or_region: str + :param phone: Required. Phone number of the recipient of the returned drives. + :type phone: str + :param email: Required. Email address of the recipient of the returned drives. 
+ :type email: str + """ + + _validation = { + 'recipient_name': {'required': True}, + 'street_address1': {'required': True}, + 'city': {'required': True}, + 'postal_code': {'required': True}, + 'country_or_region': {'required': True}, + 'phone': {'required': True}, + 'email': {'required': True}, + } + + _attribute_map = { + 'recipient_name': {'key': 'recipientName', 'type': 'str'}, + 'street_address1': {'key': 'streetAddress1', 'type': 'str'}, + 'street_address2': {'key': 'streetAddress2', 'type': 'str'}, + 'city': {'key': 'city', 'type': 'str'}, + 'state_or_province': {'key': 'stateOrProvince', 'type': 'str'}, + 'postal_code': {'key': 'postalCode', 'type': 'str'}, + 'country_or_region': {'key': 'countryOrRegion', 'type': 'str'}, + 'phone': {'key': 'phone', 'type': 'str'}, + 'email': {'key': 'email', 'type': 'str'}, + } + + def __init__( + self, + *, + recipient_name: str, + street_address1: str, + city: str, + postal_code: str, + country_or_region: str, + phone: str, + email: str, + street_address2: Optional[str] = None, + state_or_province: Optional[str] = None, + **kwargs + ): + super(ReturnAddress, self).__init__(**kwargs) + self.recipient_name = recipient_name + self.street_address1 = street_address1 + self.street_address2 = street_address2 + self.city = city + self.state_or_province = state_or_province + self.postal_code = postal_code + self.country_or_region = country_or_region + self.phone = phone + self.email = email + + +class ReturnShipping(msrest.serialization.Model): + """Specifies the return carrier and customer's account with the carrier. + + All required parameters must be populated in order to send to Azure. + + :param carrier_name: Required. The carrier's name. + :type carrier_name: str + :param carrier_account_number: Required. The customer's account number with the carrier. + :type carrier_account_number: str + """ + + _validation = { + 'carrier_name': {'required': True}, + 'carrier_account_number': {'required': True}, + } + + _attribute_map = { + 'carrier_name': {'key': 'carrierName', 'type': 'str'}, + 'carrier_account_number': {'key': 'carrierAccountNumber', 'type': 'str'}, + } + + def __init__( + self, + *, + carrier_name: str, + carrier_account_number: str, + **kwargs + ): + super(ReturnShipping, self).__init__(**kwargs) + self.carrier_name = carrier_name + self.carrier_account_number = carrier_account_number + + +class ShippingInformation(msrest.serialization.Model): + """Contains information about the Microsoft datacenter to which the drives should be shipped. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param recipient_name: The name of the recipient who will receive the hard drives when they are + returned. + :type recipient_name: str + :param street_address1: The first line of the street address to use when returning the drives. + :type street_address1: str + :param street_address2: The second line of the street address to use when returning the drives. + :type street_address2: str + :param city: The city name to use when returning the drives. + :type city: str + :param state_or_province: The state or province to use when returning the drives. + :type state_or_province: str + :param postal_code: The postal code to use when returning the drives. + :type postal_code: str + :param country_or_region: The country or region to use when returning the drives. + :type country_or_region: str + :param phone: Phone number of the recipient of the returned drives. 
+ :type phone: str + :ivar additional_information: Additional shipping information for customer, specific to + datacenter to which customer should send their disks. + :vartype additional_information: str + """ + + _validation = { + 'additional_information': {'readonly': True}, + } + + _attribute_map = { + 'recipient_name': {'key': 'recipientName', 'type': 'str'}, + 'street_address1': {'key': 'streetAddress1', 'type': 'str'}, + 'street_address2': {'key': 'streetAddress2', 'type': 'str'}, + 'city': {'key': 'city', 'type': 'str'}, + 'state_or_province': {'key': 'stateOrProvince', 'type': 'str'}, + 'postal_code': {'key': 'postalCode', 'type': 'str'}, + 'country_or_region': {'key': 'countryOrRegion', 'type': 'str'}, + 'phone': {'key': 'phone', 'type': 'str'}, + 'additional_information': {'key': 'additionalInformation', 'type': 'str'}, + } + + def __init__( + self, + *, + recipient_name: Optional[str] = None, + street_address1: Optional[str] = None, + street_address2: Optional[str] = None, + city: Optional[str] = None, + state_or_province: Optional[str] = None, + postal_code: Optional[str] = None, + country_or_region: Optional[str] = None, + phone: Optional[str] = None, + **kwargs + ): + super(ShippingInformation, self).__init__(**kwargs) + self.recipient_name = recipient_name + self.street_address1 = street_address1 + self.street_address2 = street_address2 + self.city = city + self.state_or_province = state_or_province + self.postal_code = postal_code + self.country_or_region = country_or_region + self.phone = phone + self.additional_information = None + + +class SystemData(msrest.serialization.Model): + """Metadata pertaining to creation and last modification of the resource. + + :param created_by: The identity that created the resource. + :type created_by: str + :param created_by_type: The type of identity that created the resource. Possible values + include: "User", "Application", "ManagedIdentity", "Key". + :type created_by_type: str or ~storage_import_export.models.CreatedByType + :param created_at: The timestamp of resource creation (UTC). + :type created_at: ~datetime.datetime + :param last_modified_by: The identity that last modified the resource. + :type last_modified_by: str + :param last_modified_by_type: The type of identity that last modified the resource. Possible + values include: "User", "Application", "ManagedIdentity", "Key". + :type last_modified_by_type: str or ~storage_import_export.models.CreatedByType + :param last_modified_at: The timestamp of resource last modification (UTC). 
+ :type last_modified_at: ~datetime.datetime + """ + + _attribute_map = { + 'created_by': {'key': 'createdBy', 'type': 'str'}, + 'created_by_type': {'key': 'createdByType', 'type': 'str'}, + 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, + 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, + 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, + 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + **kwargs + ): + super(SystemData, self).__init__(**kwargs) + self.created_by = created_by + self.created_by_type = created_by_type + self.created_at = created_at + self.last_modified_by = last_modified_by + self.last_modified_by_type = last_modified_by_type + self.last_modified_at = last_modified_at + + +class UpdateJobParameters(msrest.serialization.Model): + """Update Job parameters. + + :param tags: A set of tags. Specifies the tags that will be assigned to the job. + :type tags: any + :param cancel_requested: If specified, the value must be true. The service will attempt to + cancel the job. + :type cancel_requested: bool + :param state: If specified, the value must be Shipping, which tells the Import/Export service + that the package for the job has been shipped. The ReturnAddress and DeliveryPackage properties + must have been set either in this request or in a previous request, otherwise the request will + fail. + :type state: str + :param return_address: Specifies the return address information for the job. + :type return_address: ~storage_import_export.models.ReturnAddress + :param return_shipping: Specifies the return carrier and customer's account with the carrier. + :type return_shipping: ~storage_import_export.models.ReturnShipping + :param delivery_package: Contains information about the package being shipped by the customer + to the Microsoft data center. + :type delivery_package: ~storage_import_export.models.DeliveryPackageInformation + :param log_level: Indicates whether error logging or verbose logging is enabled. + :type log_level: str + :param backup_drive_manifest: Indicates whether the manifest files on the drives should be + copied to block blobs. + :type backup_drive_manifest: bool + :param drive_list: List of drives that comprise the job. 
+ :type drive_list: list[~storage_import_export.models.DriveStatus] + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': 'object'}, + 'cancel_requested': {'key': 'properties.cancelRequested', 'type': 'bool'}, + 'state': {'key': 'properties.state', 'type': 'str'}, + 'return_address': {'key': 'properties.returnAddress', 'type': 'ReturnAddress'}, + 'return_shipping': {'key': 'properties.returnShipping', 'type': 'ReturnShipping'}, + 'delivery_package': {'key': 'properties.deliveryPackage', 'type': 'DeliveryPackageInformation'}, + 'log_level': {'key': 'properties.logLevel', 'type': 'str'}, + 'backup_drive_manifest': {'key': 'properties.backupDriveManifest', 'type': 'bool'}, + 'drive_list': {'key': 'properties.driveList', 'type': '[DriveStatus]'}, + } + + def __init__( + self, + *, + tags: Optional[Any] = None, + cancel_requested: Optional[bool] = None, + state: Optional[str] = None, + return_address: Optional["ReturnAddress"] = None, + return_shipping: Optional["ReturnShipping"] = None, + delivery_package: Optional["DeliveryPackageInformation"] = None, + log_level: Optional[str] = None, + backup_drive_manifest: Optional[bool] = None, + drive_list: Optional[List["DriveStatus"]] = None, + **kwargs + ): + super(UpdateJobParameters, self).__init__(**kwargs) + self.tags = tags + self.cancel_requested = cancel_requested + self.state = state + self.return_address = return_address + self.return_shipping = return_shipping + self.delivery_package = delivery_package + self.log_level = log_level + self.backup_drive_manifest = backup_drive_manifest + self.drive_list = drive_list diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_storage_import_export_enums.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_storage_import_export_enums.py new file mode 100644 index 000000000000..397faf89d478 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/models/_storage_import_export_enums.py @@ -0,0 +1,63 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum, EnumMeta +from six import with_metaclass + +class _CaseInsensitiveEnumMeta(EnumMeta): + def __getitem__(self, name): + return super().__getitem__(name.upper()) + + def __getattr__(cls, name): + """Return the enum member matching `name` + We use __getattr__ instead of descriptors or inserting into the enum + class' __dict__ in order to support `name` and `value` being both + properties for enum members (which live in the class' __dict__) and + enum members themselves. + """ + try: + return cls._member_map_[name.upper()] + except KeyError: + raise AttributeError(name) + + +class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of identity that created the resource. + """ + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + +class DriveState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The drive's current state. 
+ """ + + SPECIFIED = "Specified" + RECEIVED = "Received" + NEVER_RECEIVED = "NeverReceived" + TRANSFERRING = "Transferring" + COMPLETED = "Completed" + COMPLETED_MORE_INFO = "CompletedMoreInfo" + SHIPPED_BACK = "ShippedBack" + +class EncryptionKekType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of kek encryption key + """ + + MICROSOFT_MANAGED = "MicrosoftManaged" + CUSTOMER_MANAGED = "CustomerManaged" + +class IdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + """The type of identity + """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/__init__.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/__init__.py new file mode 100644 index 000000000000..e1f9bbe57f6c --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/__init__.py @@ -0,0 +1,19 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from ._locations_operations import LocationsOperations +from ._jobs_operations import JobsOperations +from ._bit_locker_keys_operations import BitLockerKeysOperations +from ._operations import Operations + +__all__ = [ + 'LocationsOperations', + 'JobsOperations', + 'BitLockerKeysOperations', + 'Operations', +] diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_bit_locker_keys_operations.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_bit_locker_keys_operations.py new file mode 100644 index 000000000000..c422ab190fa3 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_bit_locker_keys_operations.py @@ -0,0 +1,125 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class BitLockerKeysOperations(object): + """BitLockerKeysOperations operations. 
+ + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + job_name, # type: str + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.GetBitLockerKeysResponse"] + """Returns the BitLocker Keys for all drives in the specified job. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either GetBitLockerKeysResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.GetBitLockerKeysResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.GetBitLockerKeysResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.post(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('GetBitLockerKeysResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}/listBitLockerKeys'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_jobs_operations.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_jobs_operations.py new file mode 100644 index 000000000000..6555d5aa4e49 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_jobs_operations.py @@ -0,0 +1,489 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class JobsOperations(object): + """JobsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_subscription( + self, + top=None, # type: Optional[int] + filter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ListJobsResponse"] + """Returns all active and completed jobs in a subscription. + + :param top: An integer value that specifies how many jobs at most should be returned. The value + cannot exceed 100. + :type top: long + :param filter: Can be used to restrict the results to certain conditions. 
+ :type filter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListJobsResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.ListJobsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListJobsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_subscription.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'long') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListJobsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.ImportExport/jobs'} # type: ignore + + def list_by_resource_group( + self, + resource_group_name, # type: str + top=None, # type: Optional[int] + filter=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ListJobsResponse"] + """Returns all active and completed jobs in a resource group. + + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param top: An integer value that specifies how many jobs at most should be returned. The value + cannot exceed 100. + :type top: long + :param filter: Can be used to restrict the results to certain conditions. 
+ :type filter: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListJobsResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.ListJobsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListJobsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list_by_resource_group.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + if top is not None: + query_parameters['$top'] = self._serialize.query("top", top, 'long') + if filter is not None: + query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListJobsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs'} # type: ignore + + def get( + self, + job_name, # type: str + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.JobResponse" + """Gets information about an existing job. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + def update( + self, + job_name, # type: str + resource_group_name, # type: str + body, # type: "_models.UpdateJobParameters" + **kwargs # type: Any + ): + # type: (...) -> "_models.JobResponse" + """Updates specific properties of a job. You can call this operation to notify the Import/Export + service that the hard drives comprising the import or export job have been shipped to the + Microsoft data center. It can also be used to cancel an existing job. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param body: The parameters to update in the job. 
+ :type body: ~storage_import_export.models.UpdateJobParameters + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'UpdateJobParameters') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + def create( + self, + job_name, # type: str + resource_group_name, # type: str + body, # type: "_models.PutJobParameters" + client_tenant_id=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "_models.JobResponse" + """Creates a new job or updates an existing job in the specified subscription. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. + :type resource_group_name: str + :param body: The parameters used for creating the job. + :type body: ~storage_import_export.models.PutJobParameters + :param client_tenant_id: The tenant ID of the client making the request. 
+ :type client_tenant_id: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobResponse, or the result of cls(response) + :rtype: ~storage_import_export.models.JobResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.JobResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + content_type = kwargs.pop("content_type", "application/json") + accept = "application/json" + + # Construct URL + url = self.create.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + if client_tenant_id is not None: + header_parameters['x-ms-client-tenant-id'] = self._serialize.header("client_tenant_id", client_tenant_id, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(body, 'PutJobParameters') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize('JobResponse', pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize('JobResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore + + def delete( + self, + job_name, # type: str + resource_group_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an existing job. Only jobs in the Creating or Completed states can be deleted. + + :param job_name: The name of the import/export job. + :type job_name: str + :param resource_group_name: The resource group name uniquely identifies the resource group + within the user subscription. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None, or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'jobName': self._serialize.url("job_name", job_name, 'str'), + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ImportExport/jobs/{jobName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_locations_operations.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_locations_operations.py new file mode 100644 index 000000000000..15f211adcfe3 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_locations_operations.py @@ -0,0 +1,171 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class LocationsOperations(object): + """LocationsOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.LocationsResponse"] + """Returns a list of locations to which you can ship the disks associated with an import or export + job. A location is a Microsoft data center region. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either LocationsResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.LocationsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.LocationsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('LocationsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return 
pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.ImportExport/locations'} # type: ignore + + def get( + self, + location_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "_models.Location" + """Returns the details about a location to which you can ship the disks associated with an import + or export job. A location is an Azure region. + + :param location_name: The name of the location. For example, West US or westus. + :type location_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Location, or the result of cls(response) + :rtype: ~storage_import_export.models.Location + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.Location"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'locationName': self._serialize.url("location_name", location_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize('Location', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/providers/Microsoft.ImportExport/locations/{locationName}'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_operations.py b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_operations.py new file mode 100644 index 000000000000..8675e33b65ce --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/operations/_operations.py @@ -0,0 +1,112 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. import models as _models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class Operations(object): + """Operations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~storage_import_export.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = _models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list( + self, + **kwargs # type: Any + ): + # type: (...) -> Iterable["_models.ListOperationsResponse"] + """Returns the list of operations supported by the import/export resource provider. + + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either ListOperationsResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~storage_import_export.models.ListOperationsResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["_models.ListOperationsResponse"] + error_map = { + 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + } + error_map.update(kwargs.pop('error_map', {})) + api_version = "2021-01-01" + accept = "application/json" + + def prepare_request(next_link=None): + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if self._config.accept_language is not None: + header_parameters['Accept-Language'] = self._serialize.header("self._config.accept_language", self._config.accept_language, 'str') + header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + + if not next_link: + # Construct URL + url = self.list.metadata['url'] # type: ignore + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + request = self._client.get(url, query_parameters, header_parameters) + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('ListOperationsResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + 
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list.metadata = {'url': '/providers/Microsoft.ImportExport/operations'} # type: ignore diff --git a/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/py.typed b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/azure/mgmt/storageimportexport/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. \ No newline at end of file diff --git a/sdk/storage/azure-mgmt-storageimportexport/sdk_packaging.toml b/sdk/storage/azure-mgmt-storageimportexport/sdk_packaging.toml new file mode 100644 index 000000000000..baa0db7562d9 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/sdk_packaging.toml @@ -0,0 +1,9 @@ +[packaging] +package_name = "azure-mgmt-storageimportexport" +package_nspkg = "azure-mgmt-nspkg" +package_pprint_name = "MyService Management" +package_doc_id = "" +is_stable = false +is_arm = true +need_msrestazure = false +need_azuremgmtcore = true diff --git a/sdk/storage/azure-mgmt-storageimportexport/setup.cfg b/sdk/storage/azure-mgmt-storageimportexport/setup.cfg new file mode 100644 index 000000000000..3c6e79cf31da --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal=1 diff --git a/sdk/storage/azure-mgmt-storageimportexport/setup.py b/sdk/storage/azure-mgmt-storageimportexport/setup.py new file mode 100644 index 000000000000..0ebd9b46ab86 --- /dev/null +++ b/sdk/storage/azure-mgmt-storageimportexport/setup.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python + +#------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +#-------------------------------------------------------------------------- + +import re +import os.path +from io import open +from setuptools import find_packages, setup + +# Change the PACKAGE_NAME only to change folder and different name +PACKAGE_NAME = "azure-mgmt-storageimportexport" +PACKAGE_PPRINT_NAME = "MyService Management" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace('-', '/') +# a-b-c => a.b.c +namespace_name = PACKAGE_NAME.replace('-', '.') + +# azure v0.x is not compatible with this package +# azure v0.x used to have a __version__ attribute (newer versions don't) +try: + import azure + try: + ver = azure.__version__ + raise Exception( + 'This package is incompatible with azure=={}. '.format(ver) + + 'Uninstall it with "pip uninstall azure".' 
+ ) + except AttributeError: + pass +except ImportError: + pass + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, 'version.py') + if os.path.exists(os.path.join(package_folder_path, 'version.py')) + else os.path.join(package_folder_path, '_version.py'), 'r') as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', + fd.read(), re.MULTILINE).group(1) + +if not version: + raise RuntimeError('Cannot find version information') + +with open('README.md', encoding='utf-8') as f: + readme = f.read() +with open('CHANGELOG.md', encoding='utf-8') as f: + changelog = f.read() + +setup( + name=PACKAGE_NAME, + version=version, + description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), + long_description=readme + '\n\n' + changelog, + long_description_content_type='text/markdown', + license='MIT License', + author='Microsoft Corporation', + author_email='azpysdkhelp@microsoft.com', + url='https://github.com/Azure/azure-sdk-for-python', + classifiers=[ + 'Development Status :: 4 - Beta', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'License :: OSI Approved :: MIT License', + ], + zip_safe=False, + packages=find_packages(exclude=[ + 'tests', + # Exclude packages that will be covered by PEP420 or nspkg + 'azure', + 'azure.mgmt', + ]), + install_requires=[ + 'msrest>=0.6.21', + 'azure-common~=1.1', + 'azure-mgmt-core>=1.2.0,<2.0.0', + ], + extras_require={ + ":python_version<'3.0'": ['azure-mgmt-nspkg'], + } +)
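A minimal usage sketch of the generated client follows, included for context only and not part of this diff. It assumes the package's top-level client class is StorageImportExport, that the operation groups above are attached to it as the locations and jobs attributes (the usual layout for AutoRest track 2 clients), and that credentials come from the separate azure-identity package; the exact constructor arguments are likewise an assumption, while the operation parameter names match the signatures shown in this diff.

# Usage sketch (assumptions noted above; not part of the generated package).
from azure.identity import DefaultAzureCredential  # installed separately; not a dependency declared by this package
from azure.mgmt.storageimportexport import StorageImportExport

credential = DefaultAzureCredential()
client = StorageImportExport(credential=credential, subscription_id="<subscription-id>")

# List the Microsoft data center regions to which import/export drives can be shipped;
# the returned ItemPaged iterator handles paging transparently.
for location in client.locations.list():
    print(location.name)  # 'name' on the Location model is an assumption

# List jobs in a resource group, optionally narrowed with $top / $filter.
# Keyword arguments are used so the sketch does not depend on parameter order.
for job in client.jobs.list_by_resource_group(resource_group_name="<resource-group>", top=10):
    print(job.name)

# Fetch a single job by name.
job = client.jobs.get(job_name="<job-name>", resource_group_name="<resource-group>")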