From 6ad26093c673e055bfc06d937611e2ebc478f604 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Mon, 31 Oct 2022 20:27:03 +0000 Subject: [PATCH] CodeGen from PR 21274 in Azure/azure-rest-api-specs Merge 54f5eabd695af9beb58cbbbfb7ef351e888375c1 into 5fc05d0f0b15cbf16de942cadce464b495c66a58 --- .../azure-mgmt-databricks/_meta.json | 12 +- .../azure/mgmt/databricks/__init__.py | 15 +- .../_azure_databricks_management_client.py | 138 +- .../azure/mgmt/databricks/_configuration.py | 63 +- .../azure/mgmt/databricks/_metadata.json | 107 - .../azure/mgmt/databricks/_patch.py | 20 + .../azure/mgmt/databricks/_serialization.py | 1970 +++++++++++++++ .../azure/mgmt/databricks/_vendor.py | 27 + .../azure/mgmt/databricks/_version.py | 2 +- .../azure/mgmt/databricks/aio/__init__.py | 15 +- .../_azure_databricks_management_client.py | 124 +- .../mgmt/databricks/aio/_configuration.py | 60 +- .../azure/mgmt/databricks/aio/_patch.py | 20 + .../databricks/aio/operations/__init__.py | 20 +- .../_access_connectors_operations.py | 861 +++++++ .../databricks/aio/operations/_operations.py | 140 +- ...twork_dependencies_endpoints_operations.py | 125 + .../mgmt/databricks/aio/operations/_patch.py | 20 + ...private_endpoint_connections_operations.py | 644 +++-- .../_private_link_resources_operations.py | 238 +- .../operations/_vnet_peering_operations.py | 638 +++-- .../aio/operations/_workspaces_operations.py | 938 ++++--- .../azure/mgmt/databricks/models/__init__.py | 270 +- ...zure_databricks_management_client_enums.py | 89 +- .../azure/mgmt/databricks/models/_models.py | 1430 ----------- .../mgmt/databricks/models/_models_py3.py | 2187 +++++++++++------ .../azure/mgmt/databricks/models/_patch.py | 20 + .../mgmt/databricks/operations/__init__.py | 20 +- .../_access_connectors_operations.py | 1058 ++++++++ .../mgmt/databricks/operations/_operations.py | 171 +- ...twork_dependencies_endpoints_operations.py | 163 ++ .../mgmt/databricks/operations/_patch.py | 20 + ...private_endpoint_connections_operations.py | 839 +++++-- .../_private_link_resources_operations.py | 324 ++- .../operations/_vnet_peering_operations.py | 818 ++++-- .../operations/_workspaces_operations.py | 1160 ++++++--- ..._customer_managed_key_(_cmk)_encryption.py | 48 + ...key_(_cmk)_encryption_for_managed_disks.py | 60 + ...te_an_azure_databricks_access_connector.py | 42 + .../create_or_update_workspace.py | 45 + ...update_workspace_with_custom_parameters.py | 54 + .../generated_samples/delete_a_workspace.py | 41 + .../delete_a_workspace_v_net_peering.py | 42 + ...te_an_azure_databricks_access_connector.py | 41 + ...kspace_which_is_prepared_for_encryption.py | 58 + .../get_a_private_endpoint_connection.py | 42 + .../get_a_private_link_resource.py | 42 + .../generated_samples/get_a_workspace.py | 41 + .../get_a_workspace_with_custom_parameters.py | 41 + ...key_(_cmk)_encryption_for_managed_disks.py | 41 + ...workspace_with_v_net_peering_configured.py | 42 + ...et_an_azure_databricks_access_connector.py | 41 + ...st_all_v_net_peerings_for_the_workspace.py | 42 + ...ork_dependencies_endpoints_by_workspace.py | 41 + .../list_private_endpoint_connections.py | 42 + .../list_private_link_resources.py | 42 + ...ccess_connectors_within_a_subscription..py | 39 + ...cess_connectors_within_a_resource_group.py | 41 + .../generated_samples/lists_workspaces.py | 41 + .../generated_samples/operations.py | 39 + .../remove_a_private_endpoint_connection.py | 42 + ..._managed_keys_encryption_on_a_workspace.py | 48 + .../update_a_private_endpoint_connection.py | 50 + 
.../update_a_workspace's_tags..py | 42 + ...key_(_cmk)_encryption_for_managed_disks.py | 61 + ...te_an_azure_databricks_access_connector.py | 42 + 66 files changed, 11343 insertions(+), 4756 deletions(-) delete mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_metadata.json create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_patch.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_serialization.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_vendor.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_patch.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_access_connectors_operations.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_outbound_network_dependencies_endpoints_operations.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_patch.py delete mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_patch.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_access_connectors_operations.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_outbound_network_dependencies_endpoints_operations.py create mode 100644 sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_patch.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/create_a_workspace_which_is_ready_for_customer_managed_key_(_cmk)_encryption.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/create_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/create_an_azure_databricks_access_connector.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/create_or_update_workspace.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/create_or_update_workspace_with_custom_parameters.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/delete_a_workspace.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/delete_a_workspace_v_net_peering.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/delete_an_azure_databricks_access_connector.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/enable_customer_managed_key_(_cmk)_encryption_on_a_workspace_which_is_prepared_for_encryption.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_private_endpoint_connection.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_private_link_resource.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_custom_parameters.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_v_net_peering_configured.py create mode 100644 
sdk/databricks/azure-mgmt-databricks/generated_samples/get_an_azure_databricks_access_connector.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/list_all_v_net_peerings_for_the_workspace.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/list_outbound_network_dependencies_endpoints_by_workspace.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_endpoint_connections.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_link_resources.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/lists_all_the_azure_databricks_access_connectors_within_a_subscription..py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/lists_azure_databricks_access_connectors_within_a_resource_group.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/lists_workspaces.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/operations.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/remove_a_private_endpoint_connection.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/revert_customer_managed_key_(_cmk)_encryption_to_microsoft_managed_keys_encryption_on_a_workspace.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_private_endpoint_connection.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_workspace's_tags..py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py create mode 100644 sdk/databricks/azure-mgmt-databricks/generated_samples/update_an_azure_databricks_access_connector.py diff --git a/sdk/databricks/azure-mgmt-databricks/_meta.json b/sdk/databricks/azure-mgmt-databricks/_meta.json index 226316ddd10d..eb4d510e7270 100644 --- a/sdk/databricks/azure-mgmt-databricks/_meta.json +++ b/sdk/databricks/azure-mgmt-databricks/_meta.json @@ -1,11 +1,11 @@ { - "autorest": "3.4.2", + "commit": "7a33e46be84cc4f5869b01a39d3eab2bb2b6befa", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest": "3.9.2", "use": [ - "@autorest/python@5.8.1", - "@autorest/modelerfour@4.19.2" + "@autorest/python@6.2.1", + "@autorest/modelerfour@4.24.3" ], - "commit": "fa3ba1acdd45ddad8950133befc5b0a6f1ee5163", - "repository_url": "https://github.com/Azure/azure-rest-api-specs", - "autorest_command": "autorest specification/databricks/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.1 --use=@autorest/modelerfour@4.19.2 --version=3.4.2", + "autorest_command": "autorest specification/databricks/resource-manager/readme.md --generate-sample=True --include-x-ms-examples-original-file=True --python --python-sdks-folder=/mnt/vss/_work/1/s/azure-sdk-for-python/sdk --use=@autorest/python@6.2.1 --use=@autorest/modelerfour@4.24.3 --version=3.9.2 --version-tolerant=False", "readme": "specification/databricks/resource-manager/readme.md" } \ No newline at end of file diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py index 00df8e78c17d..76c7067ca090 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py +++ 
b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/__init__.py @@ -10,10 +10,17 @@ from ._version import VERSION __version__ = VERSION -__all__ = ['AzureDatabricksManagementClient'] try: - from ._patch import patch_sdk # type: ignore - patch_sdk() + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import except ImportError: - pass + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureDatabricksManagementClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py index d0ddf4a60c74..c49b21565c00 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_azure_databricks_management_client.py @@ -6,94 +6,118 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from copy import deepcopy +from typing import Any, TYPE_CHECKING +from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient -from msrest import Deserializer, Serializer + +from . import models +from ._configuration import AzureDatabricksManagementClientConfiguration +from ._serialization import Deserializer, Serializer +from .operations import ( + AccessConnectorsOperations, + Operations, + OutboundNetworkDependenciesEndpointsOperations, + PrivateEndpointConnectionsOperations, + PrivateLinkResourcesOperations, + VNetPeeringOperations, + WorkspacesOperations, +) if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Optional - from azure.core.credentials import TokenCredential - from azure.core.pipeline.transport import HttpRequest, HttpResponse - -from ._configuration import AzureDatabricksManagementClientConfiguration -from .operations import WorkspacesOperations -from .operations import Operations -from .operations import PrivateLinkResourcesOperations -from .operations import PrivateEndpointConnectionsOperations -from .operations import VNetPeeringOperations -from . import models -class AzureDatabricksManagementClient(object): - """The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace resources. +class AzureDatabricksManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes + """The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace + resources. 
:ivar workspaces: WorkspacesOperations operations - :vartype workspaces: azure_databricks_management_client.operations.WorkspacesOperations + :vartype workspaces: azure.mgmt.databricks.operations.WorkspacesOperations :ivar operations: Operations operations - :vartype operations: azure_databricks_management_client.operations.Operations + :vartype operations: azure.mgmt.databricks.operations.Operations :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure_databricks_management_client.operations.PrivateLinkResourcesOperations + :vartype private_link_resources: + azure.mgmt.databricks.operations.PrivateLinkResourcesOperations :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations - :vartype private_endpoint_connections: azure_databricks_management_client.operations.PrivateEndpointConnectionsOperations + :vartype private_endpoint_connections: + azure.mgmt.databricks.operations.PrivateEndpointConnectionsOperations + :ivar outbound_network_dependencies_endpoints: OutboundNetworkDependenciesEndpointsOperations + operations + :vartype outbound_network_dependencies_endpoints: + azure.mgmt.databricks.operations.OutboundNetworkDependenciesEndpointsOperations :ivar vnet_peering: VNetPeeringOperations operations - :vartype vnet_peering: azure_databricks_management_client.operations.VNetPeeringOperations - :param credential: Credential needed for the client to connect to Azure. + :vartype vnet_peering: azure.mgmt.databricks.operations.VNetPeeringOperations + :ivar access_connectors: AccessConnectorsOperations operations + :vartype access_connectors: azure.mgmt.databricks.operations.AccessConnectorsOperations + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :param base_url: Service URL. Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-04-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. """ def __init__( self, - credential, # type: "TokenCredential" - subscription_id, # type: str - base_url=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) 
-> None - if not base_url: - base_url = 'https://management.azure.com' - self._config = AzureDatabricksManagementClientConfiguration(credential, subscription_id, **kwargs) + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AzureDatabricksManagementClientConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) - - self.workspaces = WorkspacesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) + self._serialize.client_side_validation = False + self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.private_endpoint_connections = PrivateEndpointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.vnet_peering = VNetPeeringOperations( - self._client, self._config, self._serialize, self._deserialize) - - def _send_request(self, http_request, **kwargs): - # type: (HttpRequest, Any) -> HttpResponse + self._client, self._config, self._serialize, self._deserialize + ) + self.outbound_network_dependencies_endpoints = OutboundNetworkDependenciesEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.vnet_peering = VNetPeeringOperations(self._client, self._config, self._serialize, self._deserialize) + self.access_connectors = AccessConnectorsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. - :param http_request: The network request you want to make. Required. - :type http_request: ~azure.core.pipeline.transport.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. :return: The response of your network call. Does not do error handling on your response. 
- :rtype: ~azure.core.pipeline.transport.HttpResponse + :rtype: ~azure.core.rest.HttpResponse """ - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - http_request.url = self._client.format_url(http_request.url, **path_format_arguments) - stream = kwargs.pop("stream", True) - pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs) - return pipeline_response.http_response + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) def close(self): # type: () -> None diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py index 37c43da379a8..2c7d1b9d7f57 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_configuration.py @@ -6,65 +6,70 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +import sys +from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy from ._version import VERSION +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any - from azure.core.credentials import TokenCredential -class AzureDatabricksManagementClientConfiguration(Configuration): +class AzureDatabricksManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes """Configuration for AzureDatabricksManagementClient. Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-04-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str """ - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None + def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: + super(AzureDatabricksManagementClientConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop("api_version", "2022-04-01-preview") # type: Literal["2022-04-01-preview"] + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(AzureDatabricksManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-databricks/{}'.format(VERSION)) + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-databricks/{}".format(VERSION)) self._configure(**kwargs) def _configure( - self, - **kwargs # type: Any + self, **kwargs # type: Any ): # type: (...) -> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = ARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_metadata.json b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_metadata.json deleted file mode 100644 index 4f32e60012df..000000000000 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_metadata.json +++ /dev/null @@ -1,107 +0,0 @@ -{ - "chosen_version": "", - "total_api_version_list": ["2018-04-01", "2021-04-01-preview"], - "client": { - "name": "AzureDatabricksManagementClient", - "filename": 
"_azure_databricks_management_client", - "description": "The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace resources.", - "base_url": "\u0027https://management.azure.com\u0027", - "custom_base_url": null, - "azure_arm": true, - "has_lro_operations": true, - "client_side_validation": false, - "sync_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"ARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"AzureDatabricksManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"HttpRequest\", \"HttpResponse\"]}}}", - "async_imports": "{\"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}, \"regular\": {\"azurecore\": {\"azure.profiles\": [\"KnownProfiles\", \"ProfileDefinition\"], \"azure.profiles.multiapiclient\": [\"MultiApiClientMixin\"], \"msrest\": [\"Deserializer\", \"Serializer\"], \"azure.mgmt.core\": [\"AsyncARMPipelineClient\"]}, \"local\": {\"._configuration\": [\"AzureDatabricksManagementClientConfiguration\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\", \"Optional\"]}, \"azurecore\": {\"azure.core.pipeline.transport\": [\"AsyncHttpResponse\", \"HttpRequest\"]}}}" - }, - "global_parameters": { - "sync": { - "credential": { - "signature": "credential, # type: \"TokenCredential\"", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials.TokenCredential", - "required": true - }, - "subscription_id": { - "signature": "subscription_id, # type: str", - "description": "The ID of the target subscription.", - "docstring_type": "str", - "required": true - } - }, - "async": { - "credential": { - "signature": "credential: \"AsyncTokenCredential\",", - "description": "Credential needed for the client to connect to Azure.", - "docstring_type": "~azure.core.credentials_async.AsyncTokenCredential", - "required": true - }, - "subscription_id": { - "signature": "subscription_id: str,", - "description": "The ID of the target subscription.", - "docstring_type": "str", - "required": true - } - }, - "constant": { - }, - "call": "credential, subscription_id", - "service_client_specific": { - "sync": { - "api_version": { - "signature": "api_version=None, # type: Optional[str]", - "description": "API version to use if no profile is provided, or if missing in profile.", - "docstring_type": "str", - "required": false - }, - "base_url": { - "signature": "base_url=None, # type: Optional[str]", - "description": "Service URL", - "docstring_type": "str", - "required": false - }, - "profile": { - "signature": "profile=KnownProfiles.default, # type: KnownProfiles", - "description": "A profile definition, from KnownProfiles to dict.", - "docstring_type": "azure.profiles.KnownProfiles", - "required": false - } - }, - "async": { - "api_version": { - "signature": "api_version: Optional[str] = None,", - "description": "API version to use if no profile is provided, or if missing in profile.", - "docstring_type": "str", - "required": false - }, - "base_url": { - "signature": "base_url: Optional[str] = None,", - "description": "Service URL", - "docstring_type": "str", - "required": false - }, - 
"profile": { - "signature": "profile: KnownProfiles = KnownProfiles.default,", - "description": "A profile definition, from KnownProfiles to dict.", - "docstring_type": "azure.profiles.KnownProfiles", - "required": false - } - } - } - }, - "config": { - "credential": true, - "credential_scopes": ["https://management.azure.com/.default"], - "credential_default_policy_type": "BearerTokenCredentialPolicy", - "credential_default_policy_type_has_async_version": true, - "credential_key_header_name": null, - "sync_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\"._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials\": [\"TokenCredential\"]}}}", - "async_imports": "{\"regular\": {\"azurecore\": {\"azure.core.configuration\": [\"Configuration\"], \"azure.core.pipeline\": [\"policies\"], \"azure.mgmt.core.policies\": [\"ARMHttpLoggingPolicy\"]}, \"local\": {\".._version\": [\"VERSION\"]}}, \"conditional\": {\"stdlib\": {\"typing\": [\"Any\"]}}, \"typing\": {\"azurecore\": {\"azure.core.credentials_async\": [\"AsyncTokenCredential\"]}}}" - }, - "operation_groups": { - "workspaces": "WorkspacesOperations", - "operations": "Operations", - "private_link_resources": "PrivateLinkResourcesOperations", - "private_endpoint_connections": "PrivateEndpointConnectionsOperations", - "vnet_peering": "VNetPeeringOperations" - } -} \ No newline at end of file diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_patch.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_serialization.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_serialization.py new file mode 100644 index 000000000000..7c1dedb5133d --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_serialization.py @@ -0,0 +1,1970 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# pylint: skip-file + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote # type: ignore +import xml.etree.ElementTree as ET + +import isodate + +from typing import Dict, Any, cast, TYPE_CHECKING + +from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +if TYPE_CHECKING: + from typing import Optional, Union, AnyStr, IO, Mapping + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data, content_type=None): + # type: (Optional[Union[AnyStr, IO]], Optional[str]) -> Any + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. 
+ data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes, headers): + # type: (Optional[Union[AnyStr, IO]], Mapping) -> Any + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +try: + basestring # type: ignore + unicode_str = unicode # type: ignore +except NameError: + basestring = str # type: ignore + unicode_str = str # type: ignore + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0.""" + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation.""" + return "Z" + + def dst(self, dt): + """No daylight saving for UTC.""" + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. 
+    Copy/pasted from Python doc
+    :param datetime.timedelta offset: offset in timedelta format
+    """
+
+    def __init__(self, offset):
+        self.__offset = offset
+
+    def utcoffset(self, dt):
+        return self.__offset
+
+    def tzname(self, dt):
+        return str(self.__offset.total_seconds() / 3600)
+
+    def __repr__(self):
+        return "<FixedOffset {}>".format(self.tzname(None))
+
+    def dst(self, dt):
+        return datetime.timedelta(0)
+
+    def __getinitargs__(self):
+        return (self.__offset,)
+
+
+try:
+    from datetime import timezone
+
+    TZ_UTC = timezone.utc  # type: ignore
+except ImportError:
+    TZ_UTC = UTC()  # type: ignore
+
+_FLATTEN = re.compile(r"(?<!\\)\.")
+
+
+class Serializer(object):
+    """Request object model serializer."""
+
+    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+    _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()}
+    days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"}
+    months = {
+        1: "Jan",
+        2: "Feb",
+        3: "Mar",
+        4: "Apr",
+        5: "May",
+        6: "Jun",
+        7: "Jul",
+        8: "Aug",
+        9: "Sep",
+        10: "Oct",
+        11: "Nov",
+        12: "Dec",
+    }
+    validation = {
+        "required": lambda x, y: x is None,
+        "min_length": lambda x, y: len(x) < y,
+        "max_length": lambda x, y: len(x) > y,
+        "minimum": lambda x, y: x < y,
+        "maximum": lambda x, y: x > y,
+        "minimum_ex": lambda x, y: x <= y,
+        "maximum_ex": lambda x, y: x >= y,
+        "min_items": lambda x, y: len(x) < y,
+        "max_items": lambda x, y: len(x) > y,
+        "pattern": lambda x, y: not re.match(y, x, re.UNICODE),
+        "unique": lambda x, y: len(x) != len(set(x)),
+        "multiple": lambda x, y: x % y != 0,
+    }
+
+    def __init__(self, classes=None):
+        self.serialize_type = {
+            "iso-8601": Serializer.serialize_iso,
+            "rfc-1123": Serializer.serialize_rfc,
+            "unix-time": Serializer.serialize_unix,
+            "duration": Serializer.serialize_duration,
+            "date": Serializer.serialize_date,
+            "time": Serializer.serialize_time,
+            "decimal": Serializer.serialize_decimal,
+            "long": Serializer.serialize_long,
+            "bytearray": Serializer.serialize_bytearray,
+            "base64": Serializer.serialize_base64,
+            "object": self.serialize_object,
+            "[]": self.serialize_iter,
+            "{}": self.serialize_dict,
+        }
+        self.dependencies = dict(classes) if classes else {}
+        self.key_transformer = full_restapi_key_transformer
+        self.client_side_validation = True
+
+    def _serialize(self, target_obj, data_type=None, **kwargs):
+        """Serialize data into a string according to type.
+
+        :param target_obj: The data to be serialized.
+        :param str data_type: The type to be serialized from.
+        :rtype: str, dict
+        :raises: SerializationError if serialization fails.
+        """
+        key_transformer = kwargs.get("key_transformer", self.key_transformer)
+        keep_readonly = kwargs.get("keep_readonly", False)
+        if target_obj is None:
+            return None
+
+        attr_name = None
+        class_name = target_obj.__class__.__name__
+
+        if data_type:
+            return self.serialize_data(target_obj, data_type, **kwargs)
+
+        if not hasattr(target_obj, "_attribute_map"):
+            data_type = type(target_obj).__name__
+            if data_type in self.basic_types.values():
+                return self.serialize_data(target_obj, data_type, **kwargs)
+
+        # Force "is_xml" kwargs if we detect a XML model
+        try:
+            is_xml_model_serialization = kwargs["is_xml"]
+        except KeyError:
+            is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model())
+
+        serialized = {}
+        if is_xml_model_serialization:
+            serialized = target_obj._create_xml_node()
+        try:
+            attributes = target_obj._attribute_map
+            for attr, attr_desc in attributes.items():
+                attr_name = attr
+                if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False):
+                    continue
+
+                if attr_name == "additional_properties" and attr_desc["key"] == "":
+                    if target_obj.additional_properties is not None:
+                        serialized.update(target_obj.additional_properties)
+                    continue
+                try:
+
+                    orig_attr = getattr(target_obj, attr)
+                    if is_xml_model_serialization:
+                        pass  # Don't provide "transformer" for XML for now.
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) + continue + if xml_desc.get("text", False): + serialized.text = new_attr + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) + else: # JSON + for k in reversed(keys): + unflattened = {k: new_attr} + new_attr = unflattened + + _new_attr = new_attr + _serialized = serialized + for k in keys: + if k not in _serialized: + _serialized.update(_new_attr) + _new_attr = _new_attr[k] + _serialized = _serialized[k] + except ValueError: + continue + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + + # Just in case this is a dict + internal_data_type = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. 
+ deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + data = [self.serialize_data(d, internal_data_type, **kwargs) if d is not None else "" for d in data] + if not kwargs.get("skip_quote", False): + data = [quote(str(d), safe="") for d in data] + return str(self.serialize_iter(data, internal_data_type, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. 
+ """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback(SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. 
+ :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError: + serialized.append(None) + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) + return result + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
+        :rtype: str
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_duration(attr)
+        return isodate.duration_isoformat(attr)
+
+    @staticmethod
+    def serialize_rfc(attr, **kwargs):
+        """Serialize Datetime object into RFC-1123 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises: TypeError if format invalid.
+        """
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+        except AttributeError:
+            raise TypeError("RFC1123 object must be valid Datetime object.")
+
+        return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format(
+            Serializer.days[utc.tm_wday],
+            utc.tm_mday,
+            Serializer.months[utc.tm_mon],
+            utc.tm_year,
+            utc.tm_hour,
+            utc.tm_min,
+            utc.tm_sec,
+        )
+
+    @staticmethod
+    def serialize_iso(attr, **kwargs):
+        """Serialize Datetime object into ISO-8601 formatted string.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: str
+        :raises: SerializationError if format invalid.
+        """
+        if isinstance(attr, str):
+            attr = isodate.parse_datetime(attr)
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            utc = attr.utctimetuple()
+            if utc.tm_year > 9999 or utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+
+            microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0")
+            if microseconds:
+                microseconds = "." + microseconds
+            date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(
+                utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec
+            )
+            return date + microseconds + "Z"
+        except (ValueError, OverflowError) as err:
+            msg = "Unable to serialize datetime object."
+            raise_with_traceback(SerializationError, msg, err)
+        except AttributeError as err:
+            msg = "ISO-8601 object must be valid Datetime object."
+            raise_with_traceback(TypeError, msg, err)
+
+    @staticmethod
+    def serialize_unix(attr, **kwargs):
+        """Serialize Datetime object into IntTime format.
+        This is represented as seconds.
+
+        :param Datetime attr: Object to be serialized.
+        :rtype: int
+        :raises: SerializationError if format invalid
+        """
+        if isinstance(attr, int):
+            return attr
+        try:
+            if not attr.tzinfo:
+                _LOGGER.warning("Datetime with no tzinfo will be considered UTC.")
+            return int(calendar.timegm(attr.utctimetuple()))
+        except AttributeError:
+            raise TypeError("Unix time object must be valid Datetime object.")
+
+
+def rest_key_extractor(attr, attr_desc, data):
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        dict_keys = _FLATTEN.split(key)
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = working_data.get(working_key, data)
+        if working_data is None:
+            # If we see None at any point while following the flattened JSON path,
+            # it means that all properties under it are None as well
+            # https://github.com/Azure/msrest-for-python/issues/197
+            return None
+        key = ".".join(dict_keys[1:])
+
+    return working_data.get(key)
+
+
+def rest_key_case_insensitive_extractor(attr, attr_desc, data):
+    key = attr_desc["key"]
+    working_data = data
+
+    while "." in key:
+        dict_keys = _FLATTEN.split(key)
+        if len(dict_keys) == 1:
+            key = _decode_attribute_map_key(dict_keys[0])
+            break
+        working_key = _decode_attribute_map_key(dict_keys[0])
+        working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data)
+        if working_data is None:
+            # If we see None at any point while following the flattened JSON path,
+            # it means that all properties under it are None as well
+            # https://github.com/Azure/msrest-for-python/issues/197
+            return None
+        key = ".".join(dict_keys[1:])
+
+    if working_data:
+        return attribute_key_case_insensitive_extractor(key, None, working_data)
+
+
+def last_rest_key_extractor(attr, attr_desc, data):
+    """Extract the attribute in "data" based on the last part of the JSON path key."""
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_extractor(dict_keys[-1], None, data)
+
+
+def last_rest_key_case_insensitive_extractor(attr, attr_desc, data):
+    """Extract the attribute in "data" based on the last part of the JSON path key.
+
+    This is the case-insensitive version of "last_rest_key_extractor".
+    """
+    key = attr_desc["key"]
+    dict_keys = _FLATTEN.split(key)
+    return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data)
+
+
+def attribute_key_extractor(attr, _, data):
+    return data.get(attr)
+
+
+def attribute_key_case_insensitive_extractor(attr, _, data):
+    found_key = None
+    lower_attr = attr.lower()
+    for key in data:
+        if lower_attr == key.lower():
+            found_key = key
+            break
+
+    return data.get(found_key)
+
+
+def _extract_name_from_internal_type(internal_type):
+    """Given an internal type XML description, extract the correct XML name,
+    prefixed with its namespace if one is declared.
+
+    :param dict internal_type: A model type.
+    :rtype: str
+    :returns: The XML name, with the namespace prepended if any.
+    """
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+    xml_name = internal_type_xml_map.get("name", internal_type.__name__)
+    xml_ns = internal_type_xml_map.get("ns", None)
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+    return xml_name
+
+
+def xml_key_extractor(attr, attr_desc, data):
+    if isinstance(data, dict):
+        return None
+
+    # Test if this model is XML ready first
+    if not isinstance(data, ET.Element):
+        return None
+
+    xml_desc = attr_desc.get("xml", {})
+    xml_name = xml_desc.get("name", attr_desc["key"])
+
+    # Look for children
+    is_iter_type = attr_desc["type"].startswith("[")
+    is_wrapped = xml_desc.get("wrapped", False)
+    internal_type = attr_desc.get("internalType", None)
+    internal_type_xml_map = getattr(internal_type, "_xml_map", {})
+
+    # Integrate namespace if necessary
+    xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None))
+    if xml_ns:
+        xml_name = "{{{}}}{}".format(xml_ns, xml_name)
+
+    # If it's an attribute, that's simple
+    if xml_desc.get("attr", False):
+        return data.get(xml_name)
+
+    # If it's x-ms-text, that's simple too
+    if xml_desc.get("text", False):
+        return data.text
+
+    # Scenarios where we take the local name:
+    # - Wrapped node
+    # - Internal type is an enum (considered basic types)
+    # - Internal type has no XML/Name node
+    if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)):
+        children = data.findall(xml_name)
+    # If the internal type has a local name and it's not a list, use that name
+    elif not is_iter_type and internal_type and "name" in internal_type_xml_map:
+        xml_name = _extract_name_from_internal_type(internal_type)
+        children = data.findall(xml_name)
+    # That's an array
+    else:
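+        # Illustrative note: for a non-wrapped array such as
+        #   <Items><name>a</name><name>b</name></Items>
+        # the element tag is resolved here, either from the complex type's own
+        # XML name or from "itemsName" (defaulting to the property name).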
+        if internal_type:  # Complex type, ignore itemsName and use the complex type name
+            items_name = _extract_name_from_internal_type(internal_type)
+        else:
+            items_name = xml_desc.get("itemsName", xml_name)
+        children = data.findall(items_name)
+
+    if len(children) == 0:
+        if is_iter_type:
+            if is_wrapped:
+                return None  # is_wrapped no node, we want None
+            else:
+                return []  # not wrapped, assume empty list
+        return None  # Assume it's not there, maybe an optional node.
+
+    # If is_iter_type and not wrapped, return all found children
+    if is_iter_type:
+        if not is_wrapped:
+            return children
+        else:  # Iter and wrapped, should have found one node only (the wrap one)
+            if len(children) != 1:
+                raise DeserializationError(
+                    "Tried to deserialize an array declared as wrapped, but found several nodes '{}'. Maybe this array should not be declared as wrapped?".format(
+                        xml_name
+                    )
+                )
+            return list(children[0])  # Might be empty list and that's ok.
+
+    # Here it's not an iter type; we should have found one element only, or none
+    if len(children) > 1:
+        raise DeserializationError("Found several XML '{}' nodes where only one was expected".format(xml_name))
+    return children[0]
+
+
+class Deserializer(object):
+    """Response object model deserializer.
+
+    :param dict classes: Class type dictionary for deserializing complex types.
+    :ivar list key_extractors: Ordered list of extractors to be used by this deserializer.
+    """
+
+    basic_types = {str: "str", int: "int", bool: "bool", float: "float"}
+
+    valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?")
+
+    def __init__(self, classes=None):
+        self.deserialize_type = {
+            "iso-8601": Deserializer.deserialize_iso,
+            "rfc-1123": Deserializer.deserialize_rfc,
+            "unix-time": Deserializer.deserialize_unix,
+            "duration": Deserializer.deserialize_duration,
+            "date": Deserializer.deserialize_date,
+            "time": Deserializer.deserialize_time,
+            "decimal": Deserializer.deserialize_decimal,
+            "long": Deserializer.deserialize_long,
+            "bytearray": Deserializer.deserialize_bytearray,
+            "base64": Deserializer.deserialize_base64,
+            "object": self.deserialize_object,
+            "[]": self.deserialize_iter,
+            "{}": self.deserialize_dict,
+        }
+        self.deserialize_expected_types = {
+            "duration": (isodate.Duration, datetime.timedelta),
+            "iso-8601": (datetime.datetime,),
+        }
+        self.dependencies = dict(classes) if classes else {}
+        self.key_extractors = [rest_key_extractor, xml_key_extractor]
+        # Additional-properties detection only works if "rest_key_extractor" is
+        # used to extract the keys. Making it work with any key extractor is too
+        # complicated, with no real scenario for now, so we expose a flag to
+        # disable the detection instead. Set it to False if you expect the
+        # deserialization NOT to come from JSON REST syntax; otherwise the
+        # results are unexpected.
+        self.additional_properties_detection = True
+
+    def __call__(self, target_obj, response_data, content_type=None):
+        """Call the deserializer to process a REST response.
+
+        :param str target_obj: Target data type to deserialize to.
+        :param requests.Response response_data: REST response object.
+        :param str content_type: Swagger "produces" if available.
+        :raises: DeserializationError if deserialization fails.
+        :return: Deserialized object.
+        """
+        data = self._unpack_content(response_data, content_type)
+        return self._deserialize(target_obj, data)
+
+    def _deserialize(self, target_obj, data):
+        """Call the deserializer on a model.
+ + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. 
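+
+        Classification relies on the model's ``_classify`` hook (driven by its
+        ``_subtype_map`` discriminators) when present; a plain non-Model target
+        is returned unchanged.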
+
+        :param str target: The target object type to deserialize to.
+        :param str/dict data: The response data to deserialize.
+        """
+        if target is None:
+            return None, None
+
+        if isinstance(target, basestring):
+            try:
+                target = self.dependencies[target]
+            except KeyError:
+                return target, target
+
+        try:
+            target = target._classify(data, self.dependencies)
+        except AttributeError:
+            pass  # Target is not a Model, no classify
+        return target, target.__class__.__name__
+
+    def failsafe_deserialize(self, target_obj, data, content_type=None):
+        """Ignores any errors encountered in deserialization,
+        and falls back to not deserializing the object. Recommended
+        for use in error deserialization, as we want to return the
+        HttpResponseError to users, and not have them deal with
+        a deserialization error.
+
+        :param str target_obj: The target object type to deserialize to.
+        :param str/dict data: The response data to deserialize.
+        :param str content_type: Swagger "produces" if available.
+        """
+        try:
+            return self(target_obj, data, content_type=content_type)
+        except:
+            _LOGGER.debug(
+                "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True
+            )
+            return None
+
+    @staticmethod
+    def _unpack_content(raw_data, content_type=None):
+        """Extract the correct structure for deserialization.
+
+        If raw_data is a PipelineResponse, try to extract the result of RawDeserializer.
+        If we can't, raise. Your Pipeline should have a RawDeserializer.
+
+        If not a pipeline response and raw_data is bytes or string, use content-type
+        to decode it. If no content-type, try JSON.
+
+        If raw_data is something else, bypass all logic and return it directly.
+
+        :param raw_data: Data to be processed.
+        :param content_type: How to parse if raw_data is a string/bytes.
+        :raises JSONDecodeError: If JSON is requested and parsing is impossible.
+        :raises UnicodeDecodeError: If bytes is not UTF-8 encoded.
+        """
+        # Assume this is enough to detect a Pipeline Response without importing it
+        context = getattr(raw_data, "context", {})
+        if context:
+            if RawDeserializer.CONTEXT_NAME in context:
+                return context[RawDeserializer.CONTEXT_NAME]
+            raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize")
+
+        # Assume this is enough to recognize universal_http.ClientResponse without importing it
+        if hasattr(raw_data, "body"):
+            return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers)
+
+        # Assume this is enough to recognize requests.Response without importing it.
+        if hasattr(raw_data, "_content_consumed"):
+            return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers)
+
+        if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"):
+            return RawDeserializer.deserialize_from_text(raw_data, content_type)
+        return raw_data
+
+    def _instantiate_model(self, response, attrs, additional_properties=None):
+        """Instantiate a response model passing in deserialized args.
+
+        :param response: The response model class.
+        :param attrs: The deserialized response attributes.
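+        :param additional_properties: Extra key/value pairs, if any, to set on the
+            model's ``additional_properties`` attribute after instantiation.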
+ """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [k for k, v in response._validation.items() if v.get("readonly")] + const = [k for k, v in response._validation.items() if v.get("constant")] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. 
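+
+        An illustrative sketch (assumes a ``Deserializer`` instance ``d``):
+
+        >>> d = Deserializer()
+        >>> d.deserialize_dict([{"key": "a", "value": "1"}], "int")
+        {'a': 1}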
+        :rtype: dict
+        """
+        if isinstance(attr, list):
+            return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr}
+
+        if isinstance(attr, ET.Element):
+            # Transform <Key>value</Key> into {"Key": "value"}
+            attr = {el.tag: el.text for el in attr}
+        return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()}
+
+    def deserialize_object(self, attr, **kwargs):
+        """Deserialize a generic object.
+        This will be handled as a dictionary.
+
+        :param dict attr: Dictionary to be deserialized.
+        :rtype: dict
+        :raises: TypeError if non-builtin datatype encountered.
+        """
+        if attr is None:
+            return None
+        if isinstance(attr, ET.Element):
+            # Do not recurse on XML; just return the tree as-is
+            return attr
+        if isinstance(attr, basestring):
+            return self.deserialize_basic(attr, "str")
+        obj_type = type(attr)
+        if obj_type in self.basic_types:
+            return self.deserialize_basic(attr, self.basic_types[obj_type])
+        if obj_type is _long_type:
+            return self.deserialize_long(attr)
+
+        if obj_type == dict:
+            deserialized = {}
+            for key, value in attr.items():
+                try:
+                    deserialized[key] = self.deserialize_object(value, **kwargs)
+                except ValueError:
+                    deserialized[key] = None
+            return deserialized
+
+        if obj_type == list:
+            deserialized = []
+            for obj in attr:
+                try:
+                    deserialized.append(self.deserialize_object(obj, **kwargs))
+                except ValueError:
+                    pass
+            return deserialized
+
+        else:
+            error = "Cannot deserialize generic object with type: "
+            raise TypeError(error + str(obj_type))
+
+    def deserialize_basic(self, attr, data_type):
+        """Deserialize basic builtin data type from string.
+        Will attempt to convert to str, int, float and bool.
+        This function will also accept '1', '0', 'true' and 'false' as
+        valid bool values.
+
+        :param str attr: response string to be deserialized.
+        :param str data_type: deserialization data type.
+        :rtype: str, int, float or bool
+        :raises: TypeError if string format is not valid.
+        """
+        # If we're here, data is supposed to be a basic type.
+        # If it's still an XML node, take the text
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if not attr:
+            if data_type == "str":
+                # None or '', node is empty string.
+                return ""
+            else:
+                # None or '', node with a strong type is None.
+                # Don't try to model "empty bool" or "empty int"
+                return None
+
+        if data_type == "bool":
+            if attr in [True, False, 1, 0]:
+                return bool(attr)
+            elif isinstance(attr, basestring):
+                if attr.lower() in ["true", "1"]:
+                    return True
+                elif attr.lower() in ["false", "0"]:
+                    return False
+            raise TypeError("Invalid boolean value: {}".format(attr))
+
+        if data_type == "str":
+            return self.deserialize_unicode(attr)
+        return eval(data_type)(attr)  # nosec
+
+    @staticmethod
+    def deserialize_unicode(data):
+        """Preserve unicode objects in Python 2, otherwise return data
+        as a string.
+
+        :param str data: response string to be deserialized.
+        :rtype: str or unicode
+        """
+        # We might be here because we have an enum modeled as string,
+        # and we try to deserialize a partial dict with an enum inside
+        if isinstance(data, Enum):
+            return data
+
+        # Consider this a real string
+        try:
+            if isinstance(data, unicode):
+                return data
+        except NameError:
+            return str(data)
+        else:
+            return str(data)
+
+    @staticmethod
+    def deserialize_enum(data, enum_obj):
+        """Deserialize string into enum object.
+
+        If the string is not a valid enum value it will be returned as-is
+        and a warning will be logged.
+
+        :param str data: Response string to be deserialized. If this value is
+            None or invalid it will be returned as-is.
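+
+        An illustrative sketch (``Color`` is a hypothetical enum):
+
+        >>> from enum import Enum
+        >>> class Color(Enum):
+        ...     RED = "red"
+        >>> Deserializer.deserialize_enum("Red", Color)
+        <Color.RED: 'red'>
+        >>> Deserializer.deserialize_enum("purple", Color)  # unknown value falls back to the string
+        'purple'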
+        :param Enum enum_obj: Enum object to deserialize to.
+        :rtype: Enum
+        """
+        if isinstance(data, enum_obj) or data is None:
+            return data
+        if isinstance(data, Enum):
+            data = data.value
+        if isinstance(data, int):
+            # Workaround. We may consider removing it in the future.
+            # https://github.com/Azure/azure-rest-api-specs/issues/141
+            try:
+                return list(enum_obj.__members__.values())[data]
+            except IndexError:
+                error = "{!r} is not a valid index for enum {!r}"
+                raise DeserializationError(error.format(data, enum_obj))
+        try:
+            return enum_obj(str(data))
+        except ValueError:
+            for enum_value in enum_obj:
+                if enum_value.value.lower() == str(data).lower():
+                    return enum_value
+            # We don't fail anymore for an unknown value; we deserialize it as a string
+            _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj)
+            return Deserializer.deserialize_unicode(data)
+
+    @staticmethod
+    def deserialize_bytearray(attr):
+        """Deserialize string into bytearray.
+
+        :param str attr: response string to be deserialized.
+        :rtype: bytearray
+        :raises: TypeError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return bytearray(b64decode(attr))
+
+    @staticmethod
+    def deserialize_base64(attr):
+        """Deserialize base64url-encoded string into bytes.
+
+        :param str attr: response string to be deserialized.
+        :rtype: bytes
+        :raises: TypeError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        padding = "=" * (3 - (len(attr) + 3) % 4)
+        attr = attr + padding
+        encoded = attr.replace("-", "+").replace("_", "/")
+        return b64decode(encoded)
+
+    @staticmethod
+    def deserialize_decimal(attr):
+        """Deserialize string into Decimal object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: Decimal
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            return decimal.Decimal(attr)
+        except decimal.DecimalException as err:
+            msg = "Invalid decimal {}".format(attr)
+            raise_with_traceback(DeserializationError, msg, err)
+
+    @staticmethod
+    def deserialize_long(attr):
+        """Deserialize string into long (Py2) or int (Py3).
+
+        :param str attr: response string to be deserialized.
+        :rtype: long or int
+        :raises: ValueError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        return _long_type(attr)
+
+    @staticmethod
+    def deserialize_duration(attr):
+        """Deserialize ISO-8601 formatted string into TimeDelta object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: TimeDelta
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            duration = isodate.parse_duration(attr)
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize duration object."
+            raise_with_traceback(DeserializationError, msg, err)
+        else:
+            return duration
+
+    @staticmethod
+    def deserialize_date(attr):
+        """Deserialize ISO-8601 formatted string into Date object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: Date
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):
+            raise DeserializationError("Date must have only digits and -. Received: %s" % attr)
+        # This must NOT use defaultmonth/defaultday. Using None ensures this raises an exception.
+        return isodate.parse_date(attr, defaultmonth=None, defaultday=None)
+
+    @staticmethod
+    def deserialize_time(attr):
+        """Deserialize ISO-8601 formatted string into time object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: datetime.time
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        if re.search(r"[^\W\d_]", attr, re.I + re.U):
+            raise DeserializationError("Time must have only digits and separators. Received: %s" % attr)
+        return isodate.parse_time(attr)
+
+    @staticmethod
+    def deserialize_rfc(attr):
+        """Deserialize RFC-1123 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: Datetime
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            parsed_date = email.utils.parsedate_tz(attr)
+            date_obj = datetime.datetime(
+                *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60))
+            )
+            if not date_obj.tzinfo:
+                date_obj = date_obj.astimezone(tz=TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to rfc datetime object."
+            raise_with_traceback(DeserializationError, msg, err)
+        else:
+            return date_obj
+
+    @staticmethod
+    def deserialize_iso(attr):
+        """Deserialize ISO-8601 formatted string into Datetime object.
+
+        :param str attr: response string to be deserialized.
+        :rtype: Datetime
+        :raises: DeserializationError if string format invalid.
+        """
+        if isinstance(attr, ET.Element):
+            attr = attr.text
+        try:
+            attr = attr.upper()
+            match = Deserializer.valid_date.match(attr)
+            if not match:
+                raise ValueError("Invalid datetime string: " + attr)
+
+            check_decimal = attr.split(".")
+            if len(check_decimal) > 1:
+                decimal_str = ""
+                for digit in check_decimal[1]:
+                    if digit.isdigit():
+                        decimal_str += digit
+                    else:
+                        break
+                if len(decimal_str) > 6:
+                    attr = attr.replace(decimal_str, decimal_str[0:6])
+
+            date_obj = isodate.parse_datetime(attr)
+            test_utc = date_obj.utctimetuple()
+            if test_utc.tm_year > 9999 or test_utc.tm_year < 1:
+                raise OverflowError("Hit max or min date")
+        except (ValueError, OverflowError, AttributeError) as err:
+            msg = "Cannot deserialize datetime object."
+            raise_with_traceback(DeserializationError, msg, err)
+        else:
+            return date_obj
+
+    @staticmethod
+    def deserialize_unix(attr):
+        """Deserialize a unix timestamp (seconds since epoch, IntTime format)
+        into a Datetime object.
+
+        :param int attr: Unix timestamp, in seconds, to be deserialized.
+        :rtype: Datetime
+        :raises: DeserializationError if format invalid
+        """
+        if isinstance(attr, ET.Element):
+            attr = int(attr.text)
+        try:
+            date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC)
+        except ValueError as err:
+            msg = "Cannot deserialize to unix datetime object."
+            raise_with_traceback(DeserializationError, msg, err)
+        else:
+            return date_obj
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_vendor.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_vendor.py
new file mode 100644
index 000000000000..9aad73fc743e
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_vendor.py
@@ -0,0 +1,27 @@
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.pipeline.transport import HttpRequest + + +def _convert_request(request, files=None): + data = request.content if not files else None + request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) + if files: + request.set_formdata_body(files) + return request + + +def _format_url_section(template, **kwargs): + components = template.split("/") + while components: + try: + return template.format(**kwargs) + except KeyError as key: + formatted_components = template.split("/") + components = [c for c in formatted_components if "{}".format(key.args[0]) not in c] + template = "/".join(components) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py index 653b73a4a199..e5754a47ce68 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/_version.py @@ -6,4 +6,4 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -VERSION = "1.1.0b1" +VERSION = "1.0.0b1" diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py index 4d341ec3f5c4..1ccf713f602a 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/__init__.py @@ -7,4 +7,17 @@ # -------------------------------------------------------------------------- from ._azure_databricks_management_client import AzureDatabricksManagementClient -__all__ = ['AzureDatabricksManagementClient'] + +try: + from ._patch import __all__ as _patch_all + from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureDatabricksManagementClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py index 8376d40c3e1e..c1570ce8f491 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_azure_databricks_management_client.py @@ -6,90 +6,118 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Optional, TYPE_CHECKING +from copy import deepcopy +from typing import Any, Awaitable, TYPE_CHECKING -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient -from msrest import Deserializer, Serializer + +from .. 
import models +from .._serialization import Deserializer, Serializer +from ._configuration import AzureDatabricksManagementClientConfiguration +from .operations import ( + AccessConnectorsOperations, + Operations, + OutboundNetworkDependenciesEndpointsOperations, + PrivateEndpointConnectionsOperations, + PrivateLinkResourcesOperations, + VNetPeeringOperations, + WorkspacesOperations, +) if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -from ._configuration import AzureDatabricksManagementClientConfiguration -from .operations import WorkspacesOperations -from .operations import Operations -from .operations import PrivateLinkResourcesOperations -from .operations import PrivateEndpointConnectionsOperations -from .operations import VNetPeeringOperations -from .. import models - -class AzureDatabricksManagementClient(object): - """The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace resources. +class AzureDatabricksManagementClient: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes + """The Microsoft Azure management APIs allow end users to operate on Azure Databricks Workspace + resources. :ivar workspaces: WorkspacesOperations operations - :vartype workspaces: azure_databricks_management_client.aio.operations.WorkspacesOperations + :vartype workspaces: azure.mgmt.databricks.aio.operations.WorkspacesOperations :ivar operations: Operations operations - :vartype operations: azure_databricks_management_client.aio.operations.Operations + :vartype operations: azure.mgmt.databricks.aio.operations.Operations :ivar private_link_resources: PrivateLinkResourcesOperations operations - :vartype private_link_resources: azure_databricks_management_client.aio.operations.PrivateLinkResourcesOperations + :vartype private_link_resources: + azure.mgmt.databricks.aio.operations.PrivateLinkResourcesOperations :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations - :vartype private_endpoint_connections: azure_databricks_management_client.aio.operations.PrivateEndpointConnectionsOperations + :vartype private_endpoint_connections: + azure.mgmt.databricks.aio.operations.PrivateEndpointConnectionsOperations + :ivar outbound_network_dependencies_endpoints: OutboundNetworkDependenciesEndpointsOperations + operations + :vartype outbound_network_dependencies_endpoints: + azure.mgmt.databricks.aio.operations.OutboundNetworkDependenciesEndpointsOperations :ivar vnet_peering: VNetPeeringOperations operations - :vartype vnet_peering: azure_databricks_management_client.aio.operations.VNetPeeringOperations - :param credential: Credential needed for the client to connect to Azure. + :vartype vnet_peering: azure.mgmt.databricks.aio.operations.VNetPeeringOperations + :ivar access_connectors: AccessConnectorsOperations operations + :vartype access_connectors: azure.mgmt.databricks.aio.operations.AccessConnectorsOperations + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :param str base_url: Service URL - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :param base_url: Service URL. 
Default value is "https://management.azure.com". + :type base_url: str + :keyword api_version: Api Version. Default value is "2022-04-01-preview". Note that overriding + this default value may result in unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. """ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, - base_url: Optional[str] = None, + base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - if not base_url: - base_url = 'https://management.azure.com' - self._config = AzureDatabricksManagementClientConfiguration(credential, subscription_id, **kwargs) + self._config = AzureDatabricksManagementClientConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) - self._serialize.client_side_validation = False self._deserialize = Deserializer(client_models) - - self.workspaces = WorkspacesOperations( - self._client, self._config, self._serialize, self._deserialize) - self.operations = Operations( - self._client, self._config, self._serialize, self._deserialize) + self._serialize.client_side_validation = False + self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.private_link_resources = PrivateLinkResourcesOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) self.private_endpoint_connections = PrivateEndpointConnectionsOperations( - self._client, self._config, self._serialize, self._deserialize) - self.vnet_peering = VNetPeeringOperations( - self._client, self._config, self._serialize, self._deserialize) + self._client, self._config, self._serialize, self._deserialize + ) + self.outbound_network_dependencies_endpoints = OutboundNetworkDependenciesEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.vnet_peering = VNetPeeringOperations(self._client, self._config, self._serialize, self._deserialize) + self.access_connectors = AccessConnectorsOperations( + self._client, self._config, self._serialize, self._deserialize + ) - async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse: + def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. - :param http_request: The network request you want to make. Required. - :type http_request: ~azure.core.pipeline.transport.HttpRequest - :keyword bool stream: Whether the response payload will be streamed. Defaults to True. + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client._send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. :return: The response of your network call. 
Does not do error handling on your response. - :rtype: ~azure.core.pipeline.transport.AsyncHttpResponse + :rtype: ~azure.core.rest.AsyncHttpResponse """ - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - http_request.url = self._client.format_url(http_request.url, **path_format_arguments) - stream = kwargs.pop("stream", True) - pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs) - return pipeline_response.http_response + + request_copy = deepcopy(request) + request_copy.url = self._client.format_url(request_copy.url) + return self._client.send_request(request_copy, **kwargs) async def close(self) -> None: await self._client.close() diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py index a06bbe6c18f7..529664ce722d 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_configuration.py @@ -6,61 +6,67 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import sys from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies -from azure.mgmt.core.policies import ARMHttpLoggingPolicy +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy from .._version import VERSION +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports + if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -class AzureDatabricksManagementClientConfiguration(Configuration): +class AzureDatabricksManagementClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes """Configuration for AzureDatabricksManagementClient. Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str + :keyword api_version: Api Version. Default value is "2022-04-01-preview". Note that overriding + this default value may result in unsupported behavior. 
+ :paramtype api_version: str """ - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: + def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: + super(AzureDatabricksManagementClientConfiguration, self).__init__(**kwargs) + api_version = kwargs.pop("api_version", "2022-04-01-preview") # type: Literal["2022-04-01-preview"] + if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") - super(AzureDatabricksManagementClientConfiguration, self).__init__(**kwargs) self.credential = credential self.subscription_id = subscription_id - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-databricks/{}'.format(VERSION)) + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-databricks/{}".format(VERSION)) self._configure(**kwargs) - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_patch.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/_patch.py @@ -0,0 +1,20 @@ +# 
------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py index 6fc97620650a..6de22f808b13 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/__init__.py @@ -10,12 +10,22 @@ from ._operations import Operations from ._private_link_resources_operations import PrivateLinkResourcesOperations from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._outbound_network_dependencies_endpoints_operations import OutboundNetworkDependenciesEndpointsOperations from ._vnet_peering_operations import VNetPeeringOperations +from ._access_connectors_operations import AccessConnectorsOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'WorkspacesOperations', - 'Operations', - 'PrivateLinkResourcesOperations', - 'PrivateEndpointConnectionsOperations', - 'VNetPeeringOperations', + "WorkspacesOperations", + "Operations", + "PrivateLinkResourcesOperations", + "PrivateEndpointConnectionsOperations", + "OutboundNetworkDependenciesEndpointsOperations", + "VNetPeeringOperations", + "AccessConnectorsOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_access_connectors_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_access_connectors_operations.py new file mode 100644 index 000000000000..baccfd2365ea --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_access_connectors_operations.py @@ -0,0 +1,861 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import sys +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._access_connectors_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_resource_group_request, + build_list_by_subscription_request, + build_update_request, +) + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class AccessConnectorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.aio.AzureDatabricksManagementClient`'s + :attr:`access_connectors` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> _models.AccessConnector: + """Gets an azure databricks accessConnector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param connector_name: The name of the azure databricks accessConnector. Required. 
+ :type connector_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessConnector or the result of cls(response) + :rtype: ~azure.mgmt.databricks.models.AccessConnector + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnector] + + request = build_get_request( + resource_group_name=resource_group_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("AccessConnector", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, connector_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> AsyncLROPoller[None]: + """Deletes the azure databricks accessConnector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param connector_name: The name of the azure databricks accessConnector. Required. + :type connector_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + connector_name=connector_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + async def _create_or_update_initial( + self, + resource_group_name: str, + connector_name: str, + parameters: Union[_models.AccessConnector, IO], + **kwargs: Any + ) -> _models.AccessConnector: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: 
ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.AccessConnector]
+
+        content_type = content_type or "application/json"
+        _json = None
+        _content = None
+        if isinstance(parameters, (IO, bytes)):
+            _content = parameters
+        else:
+            _json = self._serialize.body(parameters, "AccessConnector")
+
+        request = build_create_or_update_request(
+            resource_group_name=resource_group_name,
+            connector_name=connector_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            content_type=content_type,
+            json=_json,
+            content=_content,
+            template_url=self._create_or_update_initial.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 201]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        if response.status_code == 200:
+            deserialized = self._deserialize("AccessConnector", pipeline_response)
+
+        if response.status_code == 201:
+            deserialized = self._deserialize("AccessConnector", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"}  # type: ignore
+
+    @overload
+    async def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        connector_name: str,
+        parameters: _models.AccessConnector,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> AsyncLROPoller[_models.AccessConnector]:
+        """Creates or updates an azure databricks accessConnector.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param connector_name: The name of the azure databricks accessConnector. Required.
+        :type connector_name: str
+        :param parameters: Parameters supplied to create or update an azure databricks
+         accessConnector. Required.
+        :type parameters: ~azure.mgmt.databricks.models.AccessConnector
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
+         cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    async def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        connector_name: str,
+        parameters: IO,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> AsyncLROPoller[_models.AccessConnector]:
+        """Creates or updates an azure databricks accessConnector.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param connector_name: The name of the azure databricks accessConnector. Required.
+        :type connector_name: str
+        :param parameters: Parameters supplied to create or update an azure databricks
+         accessConnector. Required.
+        :type parameters: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
+         cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace_async
+    async def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        connector_name: str,
+        parameters: Union[_models.AccessConnector, IO],
+        **kwargs: Any
+    ) -> AsyncLROPoller[_models.AccessConnector]:
+        """Creates or updates an azure databricks accessConnector.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param connector_name: The name of the azure databricks accessConnector. Required.
+        :type connector_name: str
+        :param parameters: Parameters supplied to create or update an azure databricks
+         accessConnector. Is either a model type or an IO type. Required.
+        :type parameters: ~azure.mgmt.databricks.models.AccessConnector or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AccessConnector or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnector] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + connector_name=connector_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AccessConnector", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + async def _update_initial( + self, + resource_group_name: str, + connector_name: str, + parameters: Union[_models.AccessConnectorUpdate, IO], + **kwargs: Any + ) -> Optional[_models.AccessConnector]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.AccessConnector]] + + content_type = content_type or "application/json" + _json = None + _content = None + if 
isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "AccessConnectorUpdate") + + request = build_update_request( + resource_group_name=resource_group_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("AccessConnector", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + connector_name: str, + parameters: _models.AccessConnectorUpdate, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AccessConnector]: + """Updates an azure databricks accessConnector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param connector_name: The name of the azure databricks accessConnector. Required. + :type connector_name: str + :param parameters: The update to the azure databricks accessConnector. Required. + :type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either AccessConnector or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + connector_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.AccessConnector]: + """Updates an azure databricks accessConnector. 
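+
+        A minimal usage sketch (not part of the generated API; resource names are
+        illustrative and an authenticated async ``AzureDatabricksManagementClient``
+        named ``client`` is assumed), passing the update body as a stream:
+
+        .. code-block:: python
+
+            import io
+            import json
+
+            # Assumption: a tags-only update payload matching the AccessConnectorUpdate shape.
+            body = io.BytesIO(json.dumps({"tags": {"env": "dev"}}).encode("utf-8"))
+            poller = await client.access_connectors.begin_update(
+                resource_group_name="my-rg",
+                connector_name="myAccessConnector",
+                parameters=body,
+            )
+            connector = await poller.result()  # waits for the LRO to finish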
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param connector_name: The name of the azure databricks accessConnector. Required.
+        :type connector_name: str
+        :param parameters: The update to the azure databricks accessConnector. Required.
+        :type parameters: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either AccessConnector or the result of
+         cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace_async
+    async def begin_update(
+        self,
+        resource_group_name: str,
+        connector_name: str,
+        parameters: Union[_models.AccessConnectorUpdate, IO],
+        **kwargs: Any
+    ) -> AsyncLROPoller[_models.AccessConnector]:
+        """Updates an azure databricks accessConnector.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param connector_name: The name of the azure databricks accessConnector. Required.
+        :type connector_name: str
+        :param parameters: The update to the azure databricks accessConnector. Is either a model type
+         or an IO type. Required.
+        :type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either AccessConnector or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnector] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = await self._update_initial( # type: ignore + resource_group_name=resource_group_name, + connector_name=connector_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AccessConnector", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + @distributed_trace + def list_by_resource_group( + self, resource_group_name: str, **kwargs: Any + ) -> AsyncIterable["_models.AccessConnector"]: + """Gets all the azure databricks accessConnectors within a resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AccessConnector or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnectorListResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("AccessConnectorListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors"} # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.AccessConnector"]: + """Gets all the azure databricks accessConnectors within a subscription. 
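+
+        A minimal usage sketch (illustrative only; assumes an authenticated async
+        ``AzureDatabricksManagementClient`` named ``client``):
+
+        .. code-block:: python
+
+            # The pager issues requests lazily as the async iteration advances.
+            async for connector in client.access_connectors.list_by_subscription():
+                print(connector.name, connector.location)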
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AccessConnector or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnectorListResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize("AccessConnectorListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/accessConnectors"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py index 9a3c8f881553..9d92ea7c716a 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # 
Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,82 +6,114 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar -import warnings +import sys +from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models - -T = TypeVar('T') +from ..._vendor import _convert_request +from ...operations._operations import build_list_request + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class Operations: - """Operations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure_databricks_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.aio.AzureDatabricksManagementClient`'s + :attr:`operations` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def list( - self, - **kwargs: Any - ) -> AsyncIterable["_models.OperationListResult"]: + @distributed_trace + def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: """Lists all of the available RP operations. 
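+
+        A minimal usage sketch (assumes an authenticated async
+        ``AzureDatabricksManagementClient`` named ``client``; not part of the
+        generated code):
+
+        .. code-block:: python
+
+            # Enumerates every operation the resource provider exposes.
+            async for op in client.operations.list():
+                print(op.name)
+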
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.OperationListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Operation or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - request = self._client.get(url, query_parameters, header_parameters) + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResult', pipeline_response) + deserialized = self._deserialize("OperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -89,17 +122,18 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, 
**kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.Databricks/operations'} # type: ignore + return AsyncItemPaged(get_next, extract_data) + + list.metadata = {"url": "/providers/Microsoft.Databricks/operations"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_outbound_network_dependencies_endpoints_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_outbound_network_dependencies_endpoints_operations.py new file mode 100644 index 000000000000..33bb2600e7ce --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_outbound_network_dependencies_endpoints_operations.py @@ -0,0 +1,125 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import sys +from typing import Any, Callable, Dict, List, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from ... import models as _models +from ..._vendor import _convert_request +from ...operations._outbound_network_dependencies_endpoints_operations import build_list_request + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + + +class OutboundNetworkDependenciesEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.aio.AzureDatabricksManagementClient`'s + :attr:`outbound_network_dependencies_endpoints` attribute. 
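+
+    A minimal access sketch for the operation below (illustrative names; assumes an
+    authenticated async ``AzureDatabricksManagementClient`` named ``client``):
+
+    .. code-block:: python
+
+        # Returns the full list in one call; there is no paging for this operation.
+        endpoints = await client.outbound_network_dependencies_endpoints.list(
+            resource_group_name="my-rg",
+            workspace_name="myWorkspace",
+        )
+        for endpoint in endpoints:
+            print(endpoint.category)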
+    """
+
+    models = _models
+
+    def __init__(self, *args, **kwargs) -> None:
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+    @distributed_trace_async
+    async def list(
+        self, resource_group_name: str, workspace_name: str, **kwargs: Any
+    ) -> List[_models.OutboundEnvironmentEndpoint]:
+        """Gets a list of egress endpoints (network endpoints of all outbound dependencies) in the
+        specified Workspace.
+
+        Gets the list of endpoints that a VNet-injected workspace calls to reach the Azure
+        Databricks control plane. You must configure outbound access with these endpoints. For more
+        information, see
+        https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace. Required.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: list of OutboundEnvironmentEndpoint or the result of cls(response)
+        :rtype: list[~azure.mgmt.databricks.models.OutboundEnvironmentEndpoint]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        error_map = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        cls = kwargs.pop("cls", None)  # type: ClsType[List[_models.OutboundEnvironmentEndpoint]]
+
+        request = build_list_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self.list.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore
+
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize("[OutboundEnvironmentEndpoint]", pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+
+    list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"}  # type: ignore
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_patch.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_patch.py
new file mode 100644
index
000000000000..f7dd32510333 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py index 291c6ec83a02..562a06764e07 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_endpoint_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,98 +6,136 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings +import sys +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models - -T = TypeVar('T') +from ..._vendor import _convert_request +from ...operations._private_endpoint_connections_operations import ( + build_create_request, + build_delete_request, + build_get_request, + build_list_request, +) + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PrivateEndpointConnectionsOperations: - """PrivateEndpointConnectionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure_databricks_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.aio.AzureDatabricksManagementClient`'s + :attr:`private_endpoint_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.PrivateEndpointConnectionsList"]: + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointConnection"]: """List private endpoint connections. List private endpoint connections of the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointConnectionsList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.PrivateEndpointConnectionsList] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either PrivateEndpointConnection or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionsList"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionsList] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url)  # type: ignore
+                request.method = "GET"
             return request

         async def extract_data(pipeline_response):
-            deserialized = self._deserialize('PrivateEndpointConnectionsList', pipeline_response)
+            deserialized = self._deserialize("PrivateEndpointConnectionsList", pipeline_response)
             list_of_elem = deserialized.value
             if cls:
                 list_of_elem = cls(list_of_elem)
@@ -105,338 +144,455 @@ async def extract_data(pipeline_response):
         async def get_next(next_link=None):
             request = prepare_request(next_link)

-            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+                request, stream=False, **kwargs
+            )
             response = pipeline_response.http_response

             if response.status_code not in [200]:
-                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                 map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                 raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

             return pipeline_response

-        return AsyncItemPaged(
-            get_next, extract_data
-        )
-    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections'}  # type: ignore
+        return AsyncItemPaged(get_next, extract_data)

+    list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections"}  # type: ignore
+
+    @distributed_trace_async
     async def get(
-        self,
-        resource_group_name: str,
-        workspace_name: str,
-        private_endpoint_connection_name: str,
-        **kwargs: Any
-    ) -> "_models.PrivateEndpointConnection":
+        self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any
+    ) -> _models.PrivateEndpointConnection:
         """Get private endpoint connection.

-        Get a private endpoint connection properties for a workspace.
+        Get the properties of a private endpoint connection for a workspace.

         :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
         :type resource_group_name: str
-        :param workspace_name: The name of the workspace.
+        :param workspace_name: The name of the workspace. Required.
         :type workspace_name: str
-        :param private_endpoint_connection_name: The name of the private endpoint connection.
+        :param private_endpoint_connection_name: The name of the private endpoint connection.
+         Required.
:type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) - :rtype: ~azure_databricks_management_client.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.databricks.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = 
self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore async def _create_initial( self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - private_endpoint_connection: "_models.PrivateEndpointConnection", + private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO], **kwargs: Any - ) -> "_models.PrivateEndpointConnection": - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + ) -> _models.PrivateEndpointConnection: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(private_endpoint_connection, 'PrivateEndpointConnection') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] + + content_type = 
content_type or "application/json" + _json = None + _content = None + if isinstance(private_endpoint_connection, (IO, bytes)): + _content = private_endpoint_connection + else: + _json = self._serialize.body(private_endpoint_connection, "PrivateEndpointConnection") + + request = build_create_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if response.status_code == 202: - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + @overload async def begin_create( self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - private_endpoint_connection: "_models.PrivateEndpointConnection", + private_endpoint_connection: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller["_models.PrivateEndpointConnection"]: + ) -> AsyncLROPoller[_models.PrivateEndpointConnection]: """Update private endpoint connection status. Update the status of a private endpoint connection with the specified name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection. + :param private_endpoint_connection_name: The name of the private endpoint connection. Required. :type private_endpoint_connection_name: str :param private_endpoint_connection: The private endpoint connection with updated properties. 
-        :type private_endpoint_connection: ~azure_databricks_management_client.models.PrivateEndpointConnection
+         Required.
+        :type private_endpoint_connection: ~azure.mgmt.databricks.models.PrivateEndpointConnection
+        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
+         Default value is "application/json".
+        :paramtype content_type: str
         :keyword callable cls: A custom type or function that will be passed the direct response
         :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: By default, your polling method will be AsyncARMPolling.
-         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
         :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
-        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
-        :return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the result of cls(response)
-        :rtype: ~azure.core.polling.AsyncLROPoller[~azure_databricks_management_client.models.PrivateEndpointConnection]
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection]
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
-        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PrivateEndpointConnection"]
-        lro_delay = kwargs.pop(
-            'polling_interval',
-            self._config.polling_interval
-        )
-        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+
+    @overload
+    async def begin_create(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        private_endpoint_connection_name: str,
+        private_endpoint_connection: IO,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+        """Update private endpoint connection status.
+
+        Update the status of a private endpoint connection with the specified name.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace. Required.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection.
+         Required.
+        :type private_endpoint_connection_name: str
+        :param private_endpoint_connection: The private endpoint connection with updated properties.
+         Required.
+        :type private_endpoint_connection: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace_async
+    async def begin_create(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        private_endpoint_connection_name: str,
+        private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO],
+        **kwargs: Any
+    ) -> AsyncLROPoller[_models.PrivateEndpointConnection]:
+        """Update private endpoint connection status.
+
+        Update the status of a private endpoint connection with the specified name.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace. Required.
+        :type workspace_name: str
+        :param private_endpoint_connection_name: The name of the private endpoint connection.
+         Required.
+        :type private_endpoint_connection_name: str
+        :param private_endpoint_connection: The private endpoint connection with updated properties. Is
+         either a model type or an IO type. Required.
+        :type private_endpoint_connection: ~azure.mgmt.databricks.models.PrivateEndpointConnection or
+         IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either PrivateEndpointConnection or the
+         result of cls(response)
+        :rtype:
+         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.PrivateEndpointConnection]
+        polling = kwargs.pop("polling", True)  # type: Union[bool, AsyncPollingMethod]
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
         if cont_token is None:
-            raw_result = await self._create_initial(
+            raw_result = await self._create_initial(  # type: ignore
                 resource_group_name=resource_group_name,
                 workspace_name=workspace_name,
                 private_endpoint_connection_name=private_endpoint_connection_name,
                 private_endpoint_connection=private_endpoint_connection,
-                cls=lambda x,y,z: x,
+                api_version=api_version,
+                content_type=content_type,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
                 **kwargs
             )
-
-        kwargs.pop('error_map', None)
-        kwargs.pop('content_type', None)
+        kwargs.pop("error_map", None)

         def get_long_running_output(pipeline_response):
-            deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response)
-
+            deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
             if cls:
                 return cls(pipeline_response, deserialized, {})
             return deserialized

-        path_format_arguments = {
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
-        }
-
-        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
-        elif polling is False: polling_method = AsyncNoPolling()
-        else: polling_method = polling
+        if polling is True:
+            polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))  # type: AsyncPollingMethod
+        elif polling is False:
+            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+        else:
+            polling_method = polling
         if cont_token:
             return AsyncLROPoller.from_continuation_token(
                 polling_method=polling_method,
                 continuation_token=cont_token,
                 client=self._client,
-                deserialization_callback=get_long_running_output
+                deserialization_callback=get_long_running_output,
             )
-        else:
-            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-    begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
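    # Hypothetical usage sketch (an illustration, not generated code): the new
    # begin_create overloads accept either a PrivateEndpointConnection model or
    # a raw IO/bytes body; the serializer also accepts a plain dict. The client
    # variable and all resource names below are placeholder assumptions, e.g.
    #
    #     poller = await client.private_endpoint_connections.begin_create(
    #         "myResourceGroup",
    #         "myWorkspace",
    #         "myWorkspace.pe-connection-name",
    #         {"properties": {"privateLinkServiceConnectionState": {"status": "Approved", "description": "Approved by admin"}}},
    #     )
    #     connection = await poller.result()  # blocks until the LRO completes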

-    async def _delete_initial(
-        self,
-        resource_group_name: str,
-        workspace_name: str,
-        private_endpoint_connection_name: str,
-        **kwargs: Any
+    begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"}  # type: ignore
+
+    async def _delete_initial(  # pylint: disable=inconsistent-return-statements
+        self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any
     ) -> None:
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
         error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
-        }
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2021-04-01-preview"
-        accept = "application/json"
-
-        # Construct URL
-        url = self._delete_initial.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        cls = kwargs.pop("cls", None)  # type: ClsType[None]
+
+        request = build_delete_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            private_endpoint_connection_name=private_endpoint_connection_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self._delete_initial.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore

-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )

-        request = self._client.delete(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [200, 202, 204]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
             raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

         if cls:
             return cls(pipeline_response, None, {})

-    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
+    _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"}  # type: ignore

+    @distributed_trace_async
     async def begin_delete(
-        self,
-        resource_group_name: str,
-        workspace_name: str,
-        private_endpoint_connection_name: str,
-        **kwargs: Any
+        self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any
     ) -> AsyncLROPoller[None]:
         """Remove private endpoint connection.

         Remove private endpoint connection with the specified name.

         :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
         :type resource_group_name: str
-        :param workspace_name: The name of the workspace.
+        :param workspace_name: The name of the workspace. Required.
         :type workspace_name: str
-        :param private_endpoint_connection_name: The name of the private endpoint connection.
+        :param private_endpoint_connection_name: The name of the private endpoint connection.
+         Required.
         :type private_endpoint_connection_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
         :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: By default, your polling method will be AsyncARMPolling.
-         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
         :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
-        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
         :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
         :rtype: ~azure.core.polling.AsyncLROPoller[None]
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        lro_delay = kwargs.pop(
-            'polling_interval',
-            self._config.polling_interval
-        )
-        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        cls = kwargs.pop("cls", None)  # type: ClsType[None]
+        polling = kwargs.pop("polling", True)  # type: Union[bool, AsyncPollingMethod]
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
-            raw_result = await self._delete_initial(
+            raw_result = await self._delete_initial(  # type: ignore
                 resource_group_name=resource_group_name,
                 workspace_name=workspace_name,
                 private_endpoint_connection_name=private_endpoint_connection_name,
-                cls=lambda x,y,z: x,
+                api_version=api_version,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
                 **kwargs
             )
+        kwargs.pop("error_map", None)

-        kwargs.pop('error_map', None)
-        kwargs.pop('content_type', None)
-
-        def get_long_running_output(pipeline_response):
+        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
             if cls:
                 return cls(pipeline_response, None, {})

-        path_format_arguments = {
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'),
-        }
-
-        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
-        elif polling is False: polling_method = AsyncNoPolling()
-        else: polling_method = polling
+        if polling is True:
+            polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))  # type: AsyncPollingMethod
+        elif polling is False:
+            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+        else:
+            polling_method = polling
         if cont_token:
             return AsyncLROPoller.from_continuation_token(
                 polling_method=polling_method,
                 continuation_token=cont_token,
                 client=self._client,
-                deserialization_callback=get_long_running_output
+                deserialization_callback=get_long_running_output,
             )
-        else:
-            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'}  # type: ignore
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"}  # type: ignore
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py
index d8d70c948f4f..71a9ab0f0bd8 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_private_link_resources_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
 # coding=utf-8
 # --------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
@@ -5,96 +6,128 @@
 # Code generated by Microsoft (R) AutoRest Code Generator.
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
-import warnings
+import sys
+from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
+import urllib.parse

 from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    map_error,
+)
 from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.pipeline.transport import AsyncHttpResponse
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
 from azure.mgmt.core.exceptions import ARMErrorFormat

 from ... import models as _models
-
-T = TypeVar('T')
+from ..._vendor import _convert_request
+from ...operations._private_link_resources_operations import build_get_request, build_list_request
+
+if sys.version_info >= (3, 8):
+    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
+else:
+    from typing_extensions import Literal  # type: ignore  # pylint: disable=ungrouped-imports
+T = TypeVar("T")
 ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]

-class PrivateLinkResourcesOperations:
-    """PrivateLinkResourcesOperations async operations.
-
-    You should not instantiate this class directly. Instead, you should create a Client instance that
-    instantiates it for you and attaches it as an attribute.
+class PrivateLinkResourcesOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.

-    :ivar models: Alias to model classes used in this operation group.
-    :type models: ~azure_databricks_management_client.models
-    :param client: Client for service requests.
-    :param config: Configuration of service client.
-    :param serializer: An object model serializer.
-    :param deserializer: An object model deserializer.
+    Instead, you should access the following operations through
+    :class:`~azure.mgmt.databricks.aio.AzureDatabricksManagementClient`'s
+    :attr:`private_link_resources` attribute.
     """

     models = _models

-    def __init__(self, client, config, serializer, deserializer) -> None:
-        self._client = client
-        self._serialize = serializer
-        self._deserialize = deserializer
-        self._config = config
+    def __init__(self, *args, **kwargs) -> None:
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

+    @distributed_trace
     def list(
-        self,
-        resource_group_name: str,
-        workspace_name: str,
-        **kwargs: Any
-    ) -> AsyncIterable["_models.PrivateLinkResourcesList"]:
+        self, resource_group_name: str, workspace_name: str, **kwargs: Any
+    ) -> AsyncIterable["_models.GroupIdInformation"]:
         """List private link resources.

         List private link resources for a given workspace.

         :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
         :type resource_group_name: str
-        :param workspace_name: The name of the workspace.
+        :param workspace_name: The name of the workspace. Required.
         :type workspace_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: An iterator like instance of either PrivateLinkResourcesList or the result of cls(response)
-        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.PrivateLinkResourcesList]
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :return: An iterator like instance of either GroupIdInformation or the result of cls(response)
+        :rtype:
+         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.GroupIdInformation]
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType["_models.PrivateLinkResourcesList"]
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.PrivateLinkResourcesList]
+
         error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2021-04-01-preview"
-        accept = "application/json"
+        error_map.update(kwargs.pop("error_map", {}) or {})

         def prepare_request(next_link=None):
-            # Construct headers
-            header_parameters = {}  # type: Dict[str, Any]
-            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
             if not next_link:
-                # Construct URL
-                url = self.list.metadata['url']  # type: ignore
-                path_format_arguments = {
-                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-                    'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-                }
-                url = self._client.format_url(url, **path_format_arguments)
-                # Construct parameters
-                query_parameters = {}  # type: Dict[str, Any]
-                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
-                request = self._client.get(url, query_parameters, header_parameters)
+
+                request = build_list_request(
+                    resource_group_name=resource_group_name,
+                    workspace_name=workspace_name,
+                    subscription_id=self._config.subscription_id,
+                    api_version=api_version,
+                    template_url=self.list.metadata["url"],
+                    headers=_headers,
+                    params=_params,
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)  # type: ignore
+
             else:
-                url = next_link
-                query_parameters = {}  # type: Dict[str, Any]
-                request = self._client.get(url, query_parameters, header_parameters)
+                # make call to next link with the client's api-version
+                _parsed_next_link = urllib.parse.urlparse(next_link)
+                _next_request_params = case_insensitive_dict(
+                    {
+                        key: [urllib.parse.quote(v) for v in value]
+                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+                    }
+                )
+                _next_request_params["api-version"] = self._config.api_version
+                request = HttpRequest(
+                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+                )
+                request = _convert_request(request)
+                request.url = self._client.format_url(request.url)  # type: ignore
+                request.method = "GET"
             return request

         async def extract_data(pipeline_response):
-            deserialized = self._deserialize('PrivateLinkResourcesList', pipeline_response)
+            deserialized = self._deserialize("PrivateLinkResourcesList", pipeline_response)
             list_of_elem = deserialized.value
             if cls:
                 list_of_elem = cls(list_of_elem)
@@ -103,82 +136,87 @@ async def extract_data(pipeline_response):

         async def get_next(next_link=None):
             request = prepare_request(next_link)

-            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+            pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+                request, stream=False, **kwargs
+            )
             response = pipeline_response.http_response

             if response.status_code not in [200]:
-                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                 map_error(status_code=response.status_code, response=response, error_map=error_map)
+                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
                 raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

             return pipeline_response

-        return AsyncItemPaged(
-            get_next, extract_data
-        )
-    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources'}  # type: ignore
+        return AsyncItemPaged(get_next, extract_data)

+    list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources"}  # type: ignore
+
+    @distributed_trace_async
     async def get(
-        self,
-        resource_group_name: str,
-        workspace_name: str,
-        group_id: str,
-        **kwargs: Any
-    ) -> "_models.GroupIdInformation":
+        self, resource_group_name: str, workspace_name: str, group_id: str, **kwargs: Any
+    ) -> _models.GroupIdInformation:
         """Get the specified private link resource.

         Get the specified private link resource for the given group id (sub-resource).

         :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
         :type resource_group_name: str
-        :param workspace_name: The name of the workspace.
+        :param workspace_name: The name of the workspace. Required.
         :type workspace_name: str
-        :param group_id: The name of the private link resource.
+        :param group_id: The name of the private link resource. Required.
         :type group_id: str
         :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: GroupIdInformation, or the result of cls(response)
-        :rtype: ~azure_databricks_management_client.models.GroupIdInformation
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :return: GroupIdInformation or the result of cls(response)
+        :rtype: ~azure.mgmt.databricks.models.GroupIdInformation
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType["_models.GroupIdInformation"]
         error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
-        }
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2021-04-01-preview"
-        accept = "application/json"
-
-        # Construct URL
-        url = self.get.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'groupId': self._serialize.url("group_id", group_id, 'str'),
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.GroupIdInformation]
+
+        request = build_get_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            group_id=group_id,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self.get.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore

-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )

-        request = self._client.get(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [200]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
             raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

-        deserialized = self._deserialize('GroupIdInformation', pipeline_response)
+        deserialized = self._deserialize("GroupIdInformation", pipeline_response)

         if cls:
             return cls(pipeline_response, deserialized, {})

         return deserialized
-    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}'}  # type: ignore
+
+    get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}"}  # type: ignore
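A minimal, hypothetical sketch of how the private link resource operations above might be exercised. The client construction, the resource names, and the "databricks_ui_api" group id are placeholder assumptions, not part of the generated code.

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.databricks.aio import AzureDatabricksManagementClient


async def main():
    async with DefaultAzureCredential() as credential:
        async with AzureDatabricksManagementClient(credential, "00000000-0000-0000-0000-000000000000") as client:
            # list() returns an AsyncItemPaged of GroupIdInformation; iterate it with ``async for``.
            async for resource in client.private_link_resources.list("myResourceGroup", "myWorkspace"):
                print(resource.name)
            # get() fetches a single group id (sub-resource) by name.
            info = await client.private_link_resources.get("myResourceGroup", "myWorkspace", "databricks_ui_api")
            print(info.name)


asyncio.run(main())

Note that the pager only issues HTTP requests as it is iterated, so constructing it without awaiting is intentional.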
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py
index 277be5245873..f9089e1f41c8 100644
--- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py
+++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_vnet_peering_operations.py
@@ -1,3 +1,4 @@
+# pylint: disable=too-many-lines
 # coding=utf-8
 # --------------------------------------------------------------------------
 # Copyright (c) Microsoft Corporation. All rights reserved.
@@ -5,414 +6,560 @@
 # Code generated by Microsoft (R) AutoRest Code Generator.
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
-from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
-import warnings
+import sys
+from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
+import urllib.parse

 from azure.core.async_paging import AsyncItemPaged, AsyncList
-from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    map_error,
+)
 from azure.core.pipeline import PipelineResponse
-from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+from azure.core.pipeline.transport import AsyncHttpResponse
 from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
+from azure.core.rest import HttpRequest
+from azure.core.tracing.decorator import distributed_trace
+from azure.core.tracing.decorator_async import distributed_trace_async
+from azure.core.utils import case_insensitive_dict
 from azure.mgmt.core.exceptions import ARMErrorFormat
 from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling

 from ... import models as _models
-
-T = TypeVar('T')
+from ..._vendor import _convert_request
+from ...operations._vnet_peering_operations import (
+    build_create_or_update_request,
+    build_delete_request,
+    build_get_request,
+    build_list_by_workspace_request,
+)
+
+if sys.version_info >= (3, 8):
+    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
+else:
+    from typing_extensions import Literal  # type: ignore  # pylint: disable=ungrouped-imports
+T = TypeVar("T")
 ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]

-class VNetPeeringOperations:
-    """VNetPeeringOperations async operations.
-
-    You should not instantiate this class directly. Instead, you should create a Client instance that
-    instantiates it for you and attaches it as an attribute.
+class VNetPeeringOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.

-    :ivar models: Alias to model classes used in this operation group.
-    :type models: ~azure_databricks_management_client.models
-    :param client: Client for service requests.
-    :param config: Configuration of service client.
-    :param serializer: An object model serializer.
-    :param deserializer: An object model deserializer.
+    Instead, you should access the following operations through
+    :class:`~azure.mgmt.databricks.aio.AzureDatabricksManagementClient`'s
+    :attr:`vnet_peering` attribute.
     """

     models = _models

-    def __init__(self, client, config, serializer, deserializer) -> None:
-        self._client = client
-        self._serialize = serializer
-        self._deserialize = deserializer
-        self._config = config
+    def __init__(self, *args, **kwargs) -> None:
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")

+    @distributed_trace_async
     async def get(
-        self,
-        resource_group_name: str,
-        workspace_name: str,
-        peering_name: str,
-        **kwargs: Any
-    ) -> Optional["_models.VirtualNetworkPeering"]:
+        self, resource_group_name: str, workspace_name: str, peering_name: str, **kwargs: Any
+    ) -> Optional[_models.VirtualNetworkPeering]:
         """Gets the workspace vNet Peering.

         :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
         :type resource_group_name: str
-        :param workspace_name: The name of the workspace.
+        :param workspace_name: The name of the workspace. Required.
         :type workspace_name: str
-        :param peering_name: The name of the workspace vNet peering.
+        :param peering_name: The name of the workspace vNet peering. Required.
         :type peering_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
-        :return: VirtualNetworkPeering, or the result of cls(response)
-        :rtype: ~azure_databricks_management_client.models.VirtualNetworkPeering or None
-        :raises: ~azure.core.exceptions.HttpResponseError
+        :return: VirtualNetworkPeering or None or the result of cls(response)
+        :rtype: ~azure.mgmt.databricks.models.VirtualNetworkPeering or None
+        :raises ~azure.core.exceptions.HttpResponseError:
         """
-        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.VirtualNetworkPeering"]]
         error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-04-01"
-        accept = "application/json"
-
-        # Construct URL
-        url = self.get.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        cls = kwargs.pop("cls", None)  # type: ClsType[Optional[_models.VirtualNetworkPeering]]
+
+        request = build_get_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            peering_name=peering_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self.get.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore

-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )

-        request = self._client.get(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [200, 204]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
             raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

         deserialized = None
         if response.status_code == 200:
-            deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
+            deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response)

         if cls:
             return cls(pipeline_response, deserialized, {})

         return deserialized
-    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'}  # type: ignore

-    async def _delete_initial(
-        self,
-        resource_group_name: str,
-        workspace_name: str,
-        peering_name: str,
-        **kwargs: Any
+    get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"}  # type: ignore
+
+    async def _delete_initial(  # pylint: disable=inconsistent-return-statements
+        self, resource_group_name: str, workspace_name: str, peering_name: str, **kwargs: Any
     ) -> None:
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
         error_map = {
-            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
         }
-        error_map.update(kwargs.pop('error_map', {}))
-        api_version = "2018-04-01"
-        accept = "application/json"
-
-        # Construct URL
-        url = self._delete_initial.metadata['url']  # type: ignore
-        path_format_arguments = {
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}  # type: Dict[str, Any]
-        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        cls = kwargs.pop("cls", None)  # type: ClsType[None]
+
+        request = build_delete_request(
+            resource_group_name=resource_group_name,
+            workspace_name=workspace_name,
+            peering_name=peering_name,
+            subscription_id=self._config.subscription_id,
+            api_version=api_version,
+            template_url=self._delete_initial.metadata["url"],
+            headers=_headers,
+            params=_params,
+        )
+        request = _convert_request(request)
+        request.url = self._client.format_url(request.url)  # type: ignore

-        # Construct headers
-        header_parameters = {}  # type: Dict[str, Any]
-        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
+        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
+            request, stream=False, **kwargs
+        )

-        request = self._client.delete(url, query_parameters, header_parameters)
-        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
         response = pipeline_response.http_response

         if response.status_code not in [200, 202, 204]:
             map_error(status_code=response.status_code, response=response, error_map=error_map)
-            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
+            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
             raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

         if cls:
             return cls(pipeline_response, None, {})

-    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'}  # type: ignore
+    _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"}  # type: ignore

+    @distributed_trace_async
     async def begin_delete(
-        self,
-        resource_group_name: str,
-        workspace_name: str,
-        peering_name: str,
-        **kwargs: Any
+        self, resource_group_name: str, workspace_name: str, peering_name: str, **kwargs: Any
     ) -> AsyncLROPoller[None]:
         """Deletes the workspace vNetPeering.

         :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
         :type resource_group_name: str
-        :param workspace_name: The name of the workspace.
+        :param workspace_name: The name of the workspace. Required.
         :type workspace_name: str
-        :param peering_name: The name of the workspace vNet peering.
+        :param peering_name: The name of the workspace vNet peering. Required.
         :type peering_name: str
         :keyword callable cls: A custom type or function that will be passed the direct response
         :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: By default, your polling method will be AsyncARMPolling.
-         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
         :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
-        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
         :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
         :rtype: ~azure.core.polling.AsyncLROPoller[None]
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
-        cls = kwargs.pop('cls', None)  # type: ClsType[None]
-        lro_delay = kwargs.pop(
-            'polling_interval',
-            self._config.polling_interval
-        )
-        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        cls = kwargs.pop("cls", None)  # type: ClsType[None]
+        polling = kwargs.pop("polling", True)  # type: Union[bool, AsyncPollingMethod]
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
         if cont_token is None:
-            raw_result = await self._delete_initial(
+            raw_result = await self._delete_initial(  # type: ignore
                 resource_group_name=resource_group_name,
                 workspace_name=workspace_name,
                 peering_name=peering_name,
-                cls=lambda x,y,z: x,
+                api_version=api_version,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
                 **kwargs
             )
+        kwargs.pop("error_map", None)

-        kwargs.pop('error_map', None)
-        kwargs.pop('content_type', None)
-
-        def get_long_running_output(pipeline_response):
+        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
             if cls:
                 return cls(pipeline_response, None, {})

-        path_format_arguments = {
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-            'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
-        }
-
-        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
-        elif polling is False: polling_method = AsyncNoPolling()
-        else: polling_method = polling
+        if polling is True:
+            polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))  # type: AsyncPollingMethod
+        elif polling is False:
+            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
+        else:
+            polling_method = polling
         if cont_token:
             return AsyncLROPoller.from_continuation_token(
                 polling_method=polling_method,
                 continuation_token=cont_token,
                 client=self._client,
-                deserialization_callback=get_long_running_output
+                deserialization_callback=get_long_running_output,
             )
-        else:
-            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
-    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'}  # type: ignore
+        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
+
+    begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"}  # type: ignore
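    # Hypothetical usage sketch (an illustration, not generated code): the
    # begin_delete above returns an AsyncLROPoller[None]; the client variable
    # and resource names are placeholder assumptions, e.g.
    #
    #     poller = await client.vnet_peering.begin_delete("myResourceGroup", "myWorkspace", "vNetPeeringTest")
    #     await poller.result()  # None once the peering is removed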
resource_group_name: str, workspace_name: str, peering_name: str, - virtual_network_peering_parameters: "_models.VirtualNetworkPeering", + virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO], **kwargs: Any - ) -> "_models.VirtualNetworkPeering": - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"] + ) -> _models.VirtualNetworkPeering: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualNetworkPeering] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(virtual_network_peering_parameters, (IO, bytes)): + _content = virtual_network_peering_parameters + else: + _json = self._serialize.body(virtual_network_peering_parameters, "VirtualNetworkPeering") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + peering_name=peering_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response 
= await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response) + deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response) + deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'} # type: ignore + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"} # type: ignore + + @overload async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, peering_name: str, - virtual_network_peering_parameters: "_models.VirtualNetworkPeering", + virtual_network_peering_parameters: _models.VirtualNetworkPeering, + *, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller["_models.VirtualNetworkPeering"]: + ) -> AsyncLROPoller[_models.VirtualNetworkPeering]: """Creates vNet Peering for workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param peering_name: The name of the workspace vNet peering. + :param peering_name: The name of the workspace vNet peering. Required. :type peering_name: str :param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet - Peering. - :type virtual_network_peering_parameters: ~azure_databricks_management_client.models.VirtualNetworkPeering + Peering. Required. + :type virtual_network_peering_parameters: ~azure.mgmt.databricks.models.VirtualNetworkPeering + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
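The initial PUT now accepts either a typed model or a raw IO/bytes payload. A standalone sketch of that dispatch, simplified from the generated branch above (pick_body is illustrative, not part of the SDK):

from typing import Any, Dict


def pick_body(parameters: Any) -> Dict[str, Any]:
    # bytes and file-like payloads are forwarded verbatim as the request content;
    # anything else is treated as a model and serialized to JSON.
    if isinstance(parameters, bytes) or hasattr(parameters, "read"):
        return {"content": parameters}
    return {"json": parameters}  # stand-in for self._serialize.body(parameters, "VirtualNetworkPeering")


assert "content" in pick_body(b'{"properties": {}}')
assert "json" in pick_body({"properties": {}})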
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure_databricks_management_client.models.VirtualNetworkPeering] + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result + of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + peering_name: str, + virtual_network_peering_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.VirtualNetworkPeering]: + """Creates vNet Peering for workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param peering_name: The name of the workspace vNet peering. Required. + :type peering_name: str + :param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet + Peering. Required. + :type virtual_network_peering_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result + of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + peering_name: str, + virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.VirtualNetworkPeering]: + """Creates vNet Peering for workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. 
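The IO overload documented here is what enables pre-serialized payloads. A hedged sketch of calling it with raw JSON bytes (the remote vNet id and resource names are placeholders):

import json


async def create_peering_from_json(client) -> None:
    body = json.dumps(
        {
            "properties": {
                "remoteVirtualNetwork": {
                    "id": "/subscriptions/<subscription-id>/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/vnet"
                }
            }
        }
    ).encode("utf-8")
    poller = await client.vnet_peering.begin_create_or_update(
        resource_group_name="rg",
        workspace_name="myWorkspace",
        peering_name="vNetPeeringTest",
        virtual_network_peering_parameters=body,  # bytes take the IO branch of the dispatch
        content_type="application/json",
    )
    await poller.result()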
+ :type workspace_name: str + :param peering_name: The name of the workspace vNet peering. Required. + :type peering_name: str + :param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet + Peering. Is either a model type or a IO type. Required. + :type virtual_network_peering_parameters: ~azure.mgmt.databricks.models.VirtualNetworkPeering + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result + of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualNetworkPeering] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._create_or_update_initial( + raw_result = await self._create_or_update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, peering_name=peering_name, virtual_network_peering_parameters=virtual_network_peering_parameters, - cls=lambda x,y,z: x, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response) - + deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling 
is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'} # type: ignore + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"} # type: ignore + @distributed_trace def list_by_workspace( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.VirtualNetworkPeeringList"]: + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.VirtualNetworkPeering"]: """Lists the workspace vNet Peerings. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either VirtualNetworkPeeringList or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.VirtualNetworkPeeringList] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either VirtualNetworkPeering or the result of + cls(response) + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringList"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualNetworkPeeringList] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_workspace.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('VirtualNetworkPeeringList', pipeline_response) + deserialized = self._deserialize("VirtualNetworkPeeringList", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -421,17 +568,18 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data - ) - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings'} # type: ignore + return AsyncItemPaged(get_next, extract_data) + + list_by_workspace.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py index a95a73ba1a34..d2ba8fbd0f08 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/aio/operations/_workspaces_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,516 +6,741 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
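The pager above re-parses each next_link, re-quotes its query string, and re-applies the client's api-version before following it. Consuming the pager is plain async iteration; a short sketch using a client as in the earlier examples:

async def print_peerings(client) -> None:
    # AsyncItemPaged drives prepare_request/extract_data internally and
    # yields one deserialized VirtualNetworkPeering per iteration.
    async for peering in client.vnet_peering.list_by_workspace(
        resource_group_name="rg", workspace_name="myWorkspace"
    ):
        print(peering.name)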
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union -import warnings +import sys +from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models - -T = TypeVar('T') +from ..._vendor import _convert_request +from ...operations._workspaces_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_resource_group_request, + build_list_by_subscription_request, + build_update_request, +) + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class WorkspacesOperations: - """WorkspacesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure_databricks_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.aio.AzureDatabricksManagementClient`'s + :attr:`workspaces` attribute. 
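As the reworked docstring above warns, operation groups are no longer constructed by hand; they are attached to the client. A brief sketch of the supported access path (the subscription id is a placeholder):

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.databricks.aio import AzureDatabricksManagementClient

client = AzureDatabricksManagementClient(DefaultAzureCredential(), "<subscription-id>")
workspaces = client.workspaces  # WorkspacesOperations, wired to the client's pipeline and serializers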
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - async def get( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> "_models.Workspace": + @distributed_trace_async + async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: """Gets the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) - :rtype: ~azure_databricks_management_client.models.Workspace - :raises: ~azure.core.exceptions.HttpResponseError + :return: Workspace or the result of cls(response) + :rtype: ~azure.mgmt.databricks.models.Workspace + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore - async def _delete_initial( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore + + async def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = 
self._serialize.header("accept", accept, 'str') + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore - async def begin_delete( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> AsyncLROPoller[None]: + @distributed_trace_async + async def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> AsyncLROPoller[None]: """Deletes the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, - cls=lambda x,y,z: x, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) + kwargs.pop("error_map", None) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore async def _create_or_update_initial( - self, - resource_group_name: str, - workspace_name: str, - parameters: "_models.Workspace", - **kwargs: Any - ) -> "_models.Workspace": - cls = kwargs.pop('cls', None) # type: 
ClsType["_models.Workspace"] + self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + ) -> _models.Workspace: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'Workspace') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "Workspace") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore + + @overload async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, - parameters: "_models.Workspace", + parameters: _models.Workspace, + *, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller["_models.Workspace"]: + ) -> AsyncLROPoller[_models.Workspace]: """Creates a new workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param parameters: Parameters supplied to the create or update a workspace. - :type parameters: ~azure_databricks_management_client.models.Workspace + :param parameters: Parameters supplied to the create or update a workspace. Required. + :type parameters: ~azure.mgmt.databricks.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure_databricks_management_client.models.Workspace] + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Creates a new workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param parameters: Parameters supplied to the create or update a workspace. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Creates a new workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param parameters: Parameters supplied to the create or update a workspace. Is either a model + type or a IO type. Required. + :type parameters: ~azure.mgmt.databricks.models.Workspace or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
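Either overload above lands in the combined implementation that follows; an end-to-end sketch with the typed model (subscription and managed resource group ids are placeholders):

from azure.mgmt.databricks.models import Workspace


async def create_workspace(client) -> None:
    parameters = Workspace(
        location="eastus",
        managed_resource_group_id="/subscriptions/<subscription-id>/resourceGroups/<managed-rg>",
    )
    poller = await client.workspaces.begin_create_or_update(
        resource_group_name="rg", workspace_name="myWorkspace", parameters=parameters
    )
    workspace = await poller.result()
    print(workspace.workspace_url)  # read-only property populated by the service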
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._create_or_update_initial( + raw_result = await self._create_or_update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, parameters=parameters, - cls=lambda x,y,z: x, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('Workspace', pipeline_response) - + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # 
type: ignore async def _update_initial( self, resource_group_name: str, workspace_name: str, - parameters: "_models.WorkspaceUpdate", + parameters: Union[_models.WorkspaceUpdate, IO], **kwargs: Any - ) -> Optional["_models.Workspace"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceUpdate') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Workspace]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "WorkspaceUpdate") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, 
response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore + + @overload async def begin_update( self, resource_group_name: str, workspace_name: str, - parameters: "_models.WorkspaceUpdate", + parameters: _models.WorkspaceUpdate, + *, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller["_models.Workspace"]: + ) -> AsyncLROPoller[_models.Workspace]: """Updates a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param parameters: The update to the workspace. - :type parameters: ~azure_databricks_management_client.models.WorkspaceUpdate + :param parameters: The update to the workspace. Required. + :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be AsyncARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of AsyncLROPoller that returns either Workspace or the result of cls(response) - :rtype: ~azure.core.polling.AsyncLROPoller[~azure_databricks_management_client.models.Workspace] + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param parameters: The update to the workspace. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.WorkspaceUpdate, IO], + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param parameters: The update to the workspace. Is either a model type or an IO type. Required. + :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present.
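Reviewer note: the regenerated async begin_update is now two @overload declarations plus one untyped implementation, and _update_initial dispatches on isinstance(parameters, (IO, bytes)) to choose between the json= and content= request bodies. A minimal usage sketch under those signatures; the subscription, resource names, and the tags-only WorkspaceUpdate body are illustrative placeholders, not taken from this patch:

    import asyncio
    import json

    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.databricks.aio import AzureDatabricksManagementClient
    from azure.mgmt.databricks import models

    async def main() -> None:
        async with DefaultAzureCredential() as cred:
            async with AzureDatabricksManagementClient(cred, "<subscription-id>") as client:
                # Typed overload: serialized via self._serialize.body(..., "WorkspaceUpdate")
                poller = await client.workspaces.begin_update(
                    "myResourceGroup", "myWorkspace",
                    models.WorkspaceUpdate(tags={"env": "dev"}),
                )
                print((await poller.result()).provisioning_state)

                # IO overload: bytes/streams skip model serialization and are
                # sent as the request body unchanged
                poller = await client.workspaces.begin_update(
                    "myResourceGroup", "myWorkspace",
                    json.dumps({"tags": {"env": "dev"}}).encode(),
                    content_type="application/json",
                )
                await poller.result()

    asyncio.run(main())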
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = await self._update_initial( + raw_result = await self._update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, parameters=parameters, - cls=lambda x,y,z: x, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('Workspace', pipeline_response) - + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - - if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) # type: AsyncPollingMethod + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) - def list_by_resource_group( - self, - resource_group_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.WorkspaceListResult"]: + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: 
Any) -> AsyncIterable["_models.Workspace"]: """Gets all the workspaces within a resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + 
request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -523,65 +749,80 @@ async def extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces'} # type: ignore + return AsyncItemPaged(get_next, extract_data) - def list_by_subscription( - self, - **kwargs: Any - ) -> AsyncIterable["_models.WorkspaceListResult"]: + list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces"} # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Workspace"]: """Gets all the workspaces within a subscription. 
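Also worth flagging in this hunk: both pagers' return annotations change from AsyncIterable["_models.WorkspaceListResult"] to AsyncIterable["_models.Workspace"], which matches what extract_data already yields (the items in WorkspaceListResult.value), and next-link requests now re-apply the client's api-version rather than replaying the service-issued link verbatim. A consumption sketch; the resource group name is a placeholder:

    import asyncio

    from azure.core.exceptions import HttpResponseError
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.databricks.aio import AzureDatabricksManagementClient

    async def main() -> None:
        async with DefaultAzureCredential() as cred:
            async with AzureDatabricksManagementClient(cred, "<subscription-id>") as client:
                try:
                    # Pages are fetched lazily; each item is a Workspace model
                    async for ws in client.workspaces.list_by_resource_group("myResourceGroup"):
                        print(ws.name, ws.location)
                except HttpResponseError as exc:
                    # Non-200s go through error_map, then are raised with the
                    # failsafe-deserialized ErrorResponse attached as exc.model
                    print(exc.message)

    asyncio.run(main())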
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure_databricks_management_client.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_subscription.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request async def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -590,17 +831,18 @@ async def 
extract_data(pipeline_response): async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return AsyncItemPaged( - get_next, extract_data - ) - list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces'} # type: ignore + return AsyncItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py index 4645c40c923c..af81b9660f83 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/__init__.py @@ -6,150 +6,134 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -try: - from ._models_py3 import AddressSpace - from ._models_py3 import CreatedBy - from ._models_py3 import Encryption - from ._models_py3 import EncryptionEntitiesDefinition - from ._models_py3 import EncryptionV2 - from ._models_py3 import EncryptionV2KeyVaultProperties - from ._models_py3 import ErrorDetail - from ._models_py3 import ErrorInfo - from ._models_py3 import ErrorResponse - from ._models_py3 import GroupIdInformation - from ._models_py3 import GroupIdInformationProperties - from ._models_py3 import ManagedIdentityConfiguration - from ._models_py3 import Operation - from ._models_py3 import OperationDisplay - from ._models_py3 import OperationListResult - from ._models_py3 import PrivateEndpoint - from ._models_py3 import PrivateEndpointConnection - from ._models_py3 import PrivateEndpointConnectionProperties - from ._models_py3 import PrivateEndpointConnectionsList - from ._models_py3 import PrivateLinkResourcesList - from ._models_py3 import PrivateLinkServiceConnectionState - from ._models_py3 import Resource - from ._models_py3 import Sku - from ._models_py3 import SystemData - from ._models_py3 import TrackedResource - from ._models_py3 import VirtualNetworkPeering - from ._models_py3 import VirtualNetworkPeeringList - from ._models_py3 import VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork - from ._models_py3 import VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork - from ._models_py3 import Workspace - from ._models_py3 import WorkspaceCustomBooleanParameter - from ._models_py3 import WorkspaceCustomObjectParameter - from ._models_py3 import WorkspaceCustomParameters - from ._models_py3 import WorkspaceCustomStringParameter - from ._models_py3 import WorkspaceEncryptionParameter - from ._models_py3 import WorkspaceListResult - from ._models_py3 import 
WorkspacePropertiesEncryption - from ._models_py3 import WorkspaceProviderAuthorization - from ._models_py3 import WorkspaceUpdate -except (SyntaxError, ImportError): - from ._models import AddressSpace # type: ignore - from ._models import CreatedBy # type: ignore - from ._models import Encryption # type: ignore - from ._models import EncryptionEntitiesDefinition # type: ignore - from ._models import EncryptionV2 # type: ignore - from ._models import EncryptionV2KeyVaultProperties # type: ignore - from ._models import ErrorDetail # type: ignore - from ._models import ErrorInfo # type: ignore - from ._models import ErrorResponse # type: ignore - from ._models import GroupIdInformation # type: ignore - from ._models import GroupIdInformationProperties # type: ignore - from ._models import ManagedIdentityConfiguration # type: ignore - from ._models import Operation # type: ignore - from ._models import OperationDisplay # type: ignore - from ._models import OperationListResult # type: ignore - from ._models import PrivateEndpoint # type: ignore - from ._models import PrivateEndpointConnection # type: ignore - from ._models import PrivateEndpointConnectionProperties # type: ignore - from ._models import PrivateEndpointConnectionsList # type: ignore - from ._models import PrivateLinkResourcesList # type: ignore - from ._models import PrivateLinkServiceConnectionState # type: ignore - from ._models import Resource # type: ignore - from ._models import Sku # type: ignore - from ._models import SystemData # type: ignore - from ._models import TrackedResource # type: ignore - from ._models import VirtualNetworkPeering # type: ignore - from ._models import VirtualNetworkPeeringList # type: ignore - from ._models import VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork # type: ignore - from ._models import VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork # type: ignore - from ._models import Workspace # type: ignore - from ._models import WorkspaceCustomBooleanParameter # type: ignore - from ._models import WorkspaceCustomObjectParameter # type: ignore - from ._models import WorkspaceCustomParameters # type: ignore - from ._models import WorkspaceCustomStringParameter # type: ignore - from ._models import WorkspaceEncryptionParameter # type: ignore - from ._models import WorkspaceListResult # type: ignore - from ._models import WorkspacePropertiesEncryption # type: ignore - from ._models import WorkspaceProviderAuthorization # type: ignore - from ._models import WorkspaceUpdate # type: ignore +from ._models_py3 import AccessConnector +from ._models_py3 import AccessConnectorListResult +from ._models_py3 import AccessConnectorProperties +from ._models_py3 import AccessConnectorUpdate +from ._models_py3 import AddressSpace +from ._models_py3 import CreatedBy +from ._models_py3 import Encryption +from ._models_py3 import EncryptionEntitiesDefinition +from ._models_py3 import EncryptionV2 +from ._models_py3 import EncryptionV2KeyVaultProperties +from ._models_py3 import EndpointDependency +from ._models_py3 import EndpointDetail +from ._models_py3 import ErrorDetail +from ._models_py3 import ErrorInfo +from ._models_py3 import ErrorResponse +from ._models_py3 import GroupIdInformation +from ._models_py3 import GroupIdInformationProperties +from ._models_py3 import IdentityData +from ._models_py3 import ManagedDiskEncryption +from ._models_py3 import ManagedDiskEncryptionKeyVaultProperties +from ._models_py3 import ManagedIdentityConfiguration +from ._models_py3 import Operation +from 
._models_py3 import OperationDisplay +from ._models_py3 import OperationListResult +from ._models_py3 import OutboundEnvironmentEndpoint +from ._models_py3 import PrivateEndpoint +from ._models_py3 import PrivateEndpointConnection +from ._models_py3 import PrivateEndpointConnectionProperties +from ._models_py3 import PrivateEndpointConnectionsList +from ._models_py3 import PrivateLinkResourcesList +from ._models_py3 import PrivateLinkServiceConnectionState +from ._models_py3 import Resource +from ._models_py3 import Sku +from ._models_py3 import SystemData +from ._models_py3 import TrackedResource +from ._models_py3 import VirtualNetworkPeering +from ._models_py3 import VirtualNetworkPeeringList +from ._models_py3 import VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork +from ._models_py3 import VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork +from ._models_py3 import Workspace +from ._models_py3 import WorkspaceCustomBooleanParameter +from ._models_py3 import WorkspaceCustomObjectParameter +from ._models_py3 import WorkspaceCustomParameters +from ._models_py3 import WorkspaceCustomStringParameter +from ._models_py3 import WorkspaceEncryptionParameter +from ._models_py3 import WorkspaceListResult +from ._models_py3 import WorkspacePropertiesEncryption +from ._models_py3 import WorkspaceProviderAuthorization +from ._models_py3 import WorkspaceUpdate -from ._azure_databricks_management_client_enums import ( - CreatedByType, - CustomParameterType, - EncryptionKeySource, - KeySource, - PeeringProvisioningState, - PeeringState, - PrivateEndpointConnectionProvisioningState, - PrivateLinkServiceConnectionStatus, - ProvisioningState, - PublicNetworkAccess, - RequiredNsgRules, -) +from ._azure_databricks_management_client_enums import CreatedByType +from ._azure_databricks_management_client_enums import CustomParameterType +from ._azure_databricks_management_client_enums import EncryptionKeySource +from ._azure_databricks_management_client_enums import IdentityType +from ._azure_databricks_management_client_enums import KeySource +from ._azure_databricks_management_client_enums import PeeringProvisioningState +from ._azure_databricks_management_client_enums import PeeringState +from ._azure_databricks_management_client_enums import PrivateEndpointConnectionProvisioningState +from ._azure_databricks_management_client_enums import PrivateLinkServiceConnectionStatus +from ._azure_databricks_management_client_enums import ProvisioningState +from ._azure_databricks_management_client_enums import PublicNetworkAccess +from ._azure_databricks_management_client_enums import RequiredNsgRules +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'AddressSpace', - 'CreatedBy', - 'Encryption', - 'EncryptionEntitiesDefinition', - 'EncryptionV2', - 'EncryptionV2KeyVaultProperties', - 'ErrorDetail', - 'ErrorInfo', - 'ErrorResponse', - 'GroupIdInformation', - 'GroupIdInformationProperties', - 'ManagedIdentityConfiguration', - 'Operation', - 'OperationDisplay', - 'OperationListResult', - 'PrivateEndpoint', - 'PrivateEndpointConnection', - 'PrivateEndpointConnectionProperties', - 'PrivateEndpointConnectionsList', - 'PrivateLinkResourcesList', - 'PrivateLinkServiceConnectionState', - 'Resource', - 'Sku', - 'SystemData', - 'TrackedResource', - 'VirtualNetworkPeering', - 'VirtualNetworkPeeringList', - 'VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork', - 
'VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork', - 'Workspace', - 'WorkspaceCustomBooleanParameter', - 'WorkspaceCustomObjectParameter', - 'WorkspaceCustomParameters', - 'WorkspaceCustomStringParameter', - 'WorkspaceEncryptionParameter', - 'WorkspaceListResult', - 'WorkspacePropertiesEncryption', - 'WorkspaceProviderAuthorization', - 'WorkspaceUpdate', - 'CreatedByType', - 'CustomParameterType', - 'EncryptionKeySource', - 'KeySource', - 'PeeringProvisioningState', - 'PeeringState', - 'PrivateEndpointConnectionProvisioningState', - 'PrivateLinkServiceConnectionStatus', - 'ProvisioningState', - 'PublicNetworkAccess', - 'RequiredNsgRules', + "AccessConnector", + "AccessConnectorListResult", + "AccessConnectorProperties", + "AccessConnectorUpdate", + "AddressSpace", + "CreatedBy", + "Encryption", + "EncryptionEntitiesDefinition", + "EncryptionV2", + "EncryptionV2KeyVaultProperties", + "EndpointDependency", + "EndpointDetail", + "ErrorDetail", + "ErrorInfo", + "ErrorResponse", + "GroupIdInformation", + "GroupIdInformationProperties", + "IdentityData", + "ManagedDiskEncryption", + "ManagedDiskEncryptionKeyVaultProperties", + "ManagedIdentityConfiguration", + "Operation", + "OperationDisplay", + "OperationListResult", + "OutboundEnvironmentEndpoint", + "PrivateEndpoint", + "PrivateEndpointConnection", + "PrivateEndpointConnectionProperties", + "PrivateEndpointConnectionsList", + "PrivateLinkResourcesList", + "PrivateLinkServiceConnectionState", + "Resource", + "Sku", + "SystemData", + "TrackedResource", + "VirtualNetworkPeering", + "VirtualNetworkPeeringList", + "VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork", + "VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork", + "Workspace", + "WorkspaceCustomBooleanParameter", + "WorkspaceCustomObjectParameter", + "WorkspaceCustomParameters", + "WorkspaceCustomStringParameter", + "WorkspaceEncryptionParameter", + "WorkspaceListResult", + "WorkspacePropertiesEncryption", + "WorkspaceProviderAuthorization", + "WorkspaceUpdate", + "CreatedByType", + "CustomParameterType", + "EncryptionKeySource", + "IdentityType", + "KeySource", + "PeeringProvisioningState", + "PeeringState", + "PrivateEndpointConnectionProvisioningState", + "PrivateLinkServiceConnectionStatus", + "ProvisioningState", + "PublicNetworkAccess", + "RequiredNsgRules", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py index a0ccab790f46..9f03ff71b49a 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_azure_databricks_management_client_enums.py @@ -6,77 +6,68 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
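Before moving into the enums file, a note on the models/__init__.py rewrite above: the Python 2 try/except (SyntaxError, ImportError) fallback to _models is gone, every model and enum is imported directly from _models_py3 and the enums module, and the _patch hooks run at import time (names exported by a hand-written models/_patch.py are appended to __all__, then patch_sdk() executes). The public import surface is unchanged for Python 3 callers; new types simply appear alongside the old ones, e.g.:

    # Unchanged import path; a sketch of mixing pre-existing and newly added names.
    from azure.mgmt.databricks.models import (
        AccessConnector,   # new in this generation
        IdentityType,      # new enum
        Workspace,
        WorkspaceUpdate,
    )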
# -------------------------------------------------------------------------- -from enum import Enum, EnumMeta -from six import with_metaclass - -class _CaseInsensitiveEnumMeta(EnumMeta): - def __getitem__(self, name): - return super().__getitem__(name.upper()) - - def __getattr__(cls, name): - """Return the enum member matching `name` - We use __getattr__ instead of descriptors or inserting into the enum - class' __dict__ in order to support `name` and `value` being both - properties for enum members (which live in the class' __dict__) and - enum members themselves. - """ - try: - return cls._member_map_[name.upper()] - except KeyError: - raise AttributeError(name) - - -class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The type of identity that created the resource. - """ +from enum import Enum +from azure.core import CaseInsensitiveEnumMeta + + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the resource.""" USER = "User" APPLICATION = "Application" MANAGED_IDENTITY = "ManagedIdentity" KEY = "Key" -class CustomParameterType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Provisioning status of the workspace. - """ + +class CustomParameterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning status of the workspace.""" BOOL = "Bool" OBJECT = "Object" STRING = "String" -class EncryptionKeySource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Keyvault - """ + +class EncryptionKeySource(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The encryption keySource (provider). Possible values (case-insensitive): Microsoft.Keyvault.""" MICROSOFT_KEYVAULT = "Microsoft.Keyvault" -class KeySource(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class IdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The identity type.""" + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + + +class KeySource(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The encryption keySource (provider). Possible values (case-insensitive): Default, - Microsoft.Keyvault + Microsoft.Keyvault. """ DEFAULT = "Default" MICROSOFT_KEYVAULT = "Microsoft.Keyvault" -class PeeringProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The current provisioning state. - """ + +class PeeringProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current provisioning state.""" SUCCEEDED = "Succeeded" UPDATING = "Updating" DELETING = "Deleting" FAILED = "Failed" -class PeeringState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The status of the virtual network peering. - """ + +class PeeringState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of the virtual network peering.""" INITIATED = "Initiated" CONNECTED = "Connected" DISCONNECTED = "Disconnected" -class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The current provisioning state. 
- """ + +class PrivateEndpointConnectionProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current provisioning state.""" SUCCEEDED = "Succeeded" CREATING = "Creating" @@ -84,18 +75,18 @@ class PrivateEndpointConnectionProvisioningState(with_metaclass(_CaseInsensitive DELETING = "Deleting" FAILED = "Failed" -class PrivateLinkServiceConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """The status of a private endpoint connection - """ + +class PrivateLinkServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of a private endpoint connection.""" PENDING = "Pending" APPROVED = "Approved" REJECTED = "Rejected" DISCONNECTED = "Disconnected" -class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): - """Provisioning status of the workspace. - """ + +class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning status of the workspace.""" ACCEPTED = "Accepted" RUNNING = "Running" @@ -109,7 +100,8 @@ class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): SUCCEEDED = "Succeeded" UPDATING = "Updating" -class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): """The network access type for accessing workspace. Set value to disabled to access workspace only via private link. """ @@ -117,7 +109,8 @@ class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): ENABLED = "Enabled" DISABLED = "Disabled" -class RequiredNsgRules(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): + +class RequiredNsgRules(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Gets or sets a value indicating whether data plane (clusters) to control plane communication happen over private endpoint. Supported values are 'AllRules' and 'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models.py deleted file mode 100644 index 30cd1f13f340..000000000000 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models.py +++ /dev/null @@ -1,1430 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.exceptions import HttpResponseError -import msrest.serialization - - -class AddressSpace(msrest.serialization.Model): - """AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network. - - :param address_prefixes: A list of address blocks reserved for this virtual network in CIDR - notation. - :type address_prefixes: list[str] - """ - - _attribute_map = { - 'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(AddressSpace, self).__init__(**kwargs) - self.address_prefixes = kwargs.get('address_prefixes', None) - - -class CreatedBy(msrest.serialization.Model): - """Provides details of the entity that created/updated the workspace. 
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar oid: The Object ID that created the workspace. - :vartype oid: str - :ivar puid: The Personal Object ID corresponding to the object ID above. - :vartype puid: str - :ivar application_id: The application ID of the application that initiated the creation of the - workspace. For example, Azure Portal. - :vartype application_id: str - """ - - _validation = { - 'oid': {'readonly': True}, - 'puid': {'readonly': True}, - 'application_id': {'readonly': True}, - } - - _attribute_map = { - 'oid': {'key': 'oid', 'type': 'str'}, - 'puid': {'key': 'puid', 'type': 'str'}, - 'application_id': {'key': 'applicationId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(CreatedBy, self).__init__(**kwargs) - self.oid = None - self.puid = None - self.application_id = None - - -class Encryption(msrest.serialization.Model): - """The object that contains details of encryption used on the workspace. - - :param key_source: The encryption keySource (provider). Possible values (case-insensitive): - Default, Microsoft.Keyvault. Possible values include: "Default", "Microsoft.Keyvault". Default - value: "Default". - :type key_source: str or ~azure_databricks_management_client.models.KeySource - :param key_name: The name of KeyVault key. - :type key_name: str - :param key_version: The version of KeyVault key. - :type key_version: str - :param key_vault_uri: The Uri of KeyVault. - :type key_vault_uri: str - """ - - _attribute_map = { - 'key_source': {'key': 'keySource', 'type': 'str'}, - 'key_name': {'key': 'KeyName', 'type': 'str'}, - 'key_version': {'key': 'keyversion', 'type': 'str'}, - 'key_vault_uri': {'key': 'keyvaulturi', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Encryption, self).__init__(**kwargs) - self.key_source = kwargs.get('key_source', "Default") - self.key_name = kwargs.get('key_name', None) - self.key_version = kwargs.get('key_version', None) - self.key_vault_uri = kwargs.get('key_vault_uri', None) - - -class EncryptionEntitiesDefinition(msrest.serialization.Model): - """Encryption entities for databricks workspace resource. - - :param managed_services: Encryption properties for the databricks managed services. - :type managed_services: ~azure_databricks_management_client.models.EncryptionV2 - """ - - _attribute_map = { - 'managed_services': {'key': 'managedServices', 'type': 'EncryptionV2'}, - } - - def __init__( - self, - **kwargs - ): - super(EncryptionEntitiesDefinition, self).__init__(**kwargs) - self.managed_services = kwargs.get('managed_services', None) - - -class EncryptionV2(msrest.serialization.Model): - """The object that contains details of encryption used on the workspace. - - All required parameters must be populated in order to send to Azure. - - :param key_source: Required. The encryption keySource (provider). Possible values - (case-insensitive): Microsoft.Keyvault. Possible values include: "Microsoft.Keyvault". - :type key_source: str or ~azure_databricks_management_client.models.EncryptionKeySource - :param key_vault_properties: Key Vault input properties for encryption. 
- :type key_vault_properties: - ~azure_databricks_management_client.models.EncryptionV2KeyVaultProperties - """ - - _validation = { - 'key_source': {'required': True}, - } - - _attribute_map = { - 'key_source': {'key': 'keySource', 'type': 'str'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionV2KeyVaultProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(EncryptionV2, self).__init__(**kwargs) - self.key_source = kwargs['key_source'] - self.key_vault_properties = kwargs.get('key_vault_properties', None) - - -class EncryptionV2KeyVaultProperties(msrest.serialization.Model): - """Key Vault input properties for encryption. - - All required parameters must be populated in order to send to Azure. - - :param key_vault_uri: Required. The Uri of KeyVault. - :type key_vault_uri: str - :param key_name: Required. The name of KeyVault key. - :type key_name: str - :param key_version: Required. The version of KeyVault key. - :type key_version: str - """ - - _validation = { - 'key_vault_uri': {'required': True}, - 'key_name': {'required': True}, - 'key_version': {'required': True}, - } - - _attribute_map = { - 'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'}, - 'key_name': {'key': 'keyName', 'type': 'str'}, - 'key_version': {'key': 'keyVersion', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(EncryptionV2KeyVaultProperties, self).__init__(**kwargs) - self.key_vault_uri = kwargs['key_vault_uri'] - self.key_name = kwargs['key_name'] - self.key_version = kwargs['key_version'] - - -class ErrorDetail(msrest.serialization.Model): - """Error details. - - All required parameters must be populated in order to send to Azure. - - :param code: Required. The error's code. - :type code: str - :param message: Required. A human readable error message. - :type message: str - :param target: Indicates which property in the request is responsible for the error. - :type target: str - """ - - _validation = { - 'code': {'required': True}, - 'message': {'required': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorDetail, self).__init__(**kwargs) - self.code = kwargs['code'] - self.message = kwargs['message'] - self.target = kwargs.get('target', None) - - -class ErrorInfo(msrest.serialization.Model): - """The code and message for an error. - - All required parameters must be populated in order to send to Azure. - - :param code: Required. A machine readable error code. - :type code: str - :param message: Required. A human readable error message. - :type message: str - :param details: error details. - :type details: list[~azure_databricks_management_client.models.ErrorDetail] - :param innererror: Inner error details if they exist. 
- :type innererror: str - """ - - _validation = { - 'code': {'required': True}, - 'message': {'required': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - 'innererror': {'key': 'innererror', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorInfo, self).__init__(**kwargs) - self.code = kwargs['code'] - self.message = kwargs['message'] - self.details = kwargs.get('details', None) - self.innererror = kwargs.get('innererror', None) - - -class ErrorResponse(msrest.serialization.Model): - """Contains details when the response code indicates an error. - - All required parameters must be populated in order to send to Azure. - - :param error: Required. The error details. - :type error: ~azure_databricks_management_client.models.ErrorInfo - """ - - _validation = { - 'error': {'required': True}, - } - - _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorInfo'}, - } - - def __init__( - self, - **kwargs - ): - super(ErrorResponse, self).__init__(**kwargs) - self.error = kwargs['error'] - - -class Resource(msrest.serialization.Model): - """The core properties of ARM resources. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - - -class GroupIdInformation(Resource): - """The group information for creating a private endpoint on a workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - :param properties: Required. The group id properties. 
- :type properties: ~azure_databricks_management_client.models.GroupIdInformationProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'GroupIdInformationProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(GroupIdInformation, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class GroupIdInformationProperties(msrest.serialization.Model): - """The properties for a group information object. - - :param group_id: The group id. - :type group_id: str - :param required_members: The required members for a specific group id. - :type required_members: list[str] - :param required_zone_names: The required DNS zones for a specific group id. - :type required_zone_names: list[str] - """ - - _attribute_map = { - 'group_id': {'key': 'groupId', 'type': 'str'}, - 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - super(GroupIdInformationProperties, self).__init__(**kwargs) - self.group_id = kwargs.get('group_id', None) - self.required_members = kwargs.get('required_members', None) - self.required_zone_names = kwargs.get('required_zone_names', None) - - -class ManagedIdentityConfiguration(msrest.serialization.Model): - """The Managed Identity details for storage account. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar principal_id: The objectId of the Managed Identity that is linked to the Managed Storage - account. - :vartype principal_id: str - :ivar tenant_id: The tenant Id where the Managed Identity is created. - :vartype tenant_id: str - :ivar type: The type of Identity created. It can be either SystemAssigned or UserAssigned. - :vartype type: str - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'readonly': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(ManagedIdentityConfiguration, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = None - - -class Operation(msrest.serialization.Model): - """REST API operation. - - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param display: The object that represents the operation. - :type display: ~azure_databricks_management_client.models.OperationDisplay - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - } - - def __init__( - self, - **kwargs - ): - super(Operation, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.display = kwargs.get('display', None) - - -class OperationDisplay(msrest.serialization.Model): - """The object that represents the operation. - - :param provider: Service provider: Microsoft.ResourceProvider. - :type provider: str - :param resource: Resource on which the operation is performed. - :type resource: str - :param operation: Operation type: Read, write, delete, etc. 
- :type operation: str - """ - - _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationDisplay, self).__init__(**kwargs) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - self.operation = kwargs.get('operation', None) - - -class OperationListResult(msrest.serialization.Model): - """Result of the request to list Resource Provider operations. It contains a list of operations and a URL link to get the next set of results. - - :param value: List of Resource Provider operations supported by the Resource Provider resource - provider. - :type value: list[~azure_databricks_management_client.models.Operation] - :param next_link: URL to get the next set of operation list results if there are any. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(OperationListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class PrivateEndpoint(msrest.serialization.Model): - """The private endpoint property of a private endpoint connection. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The resource identifier. - :vartype id: str - """ - - _validation = { - 'id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpoint, self).__init__(**kwargs) - self.id = None - - -class PrivateEndpointConnection(msrest.serialization.Model): - """The private endpoint connection of a workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param properties: Required. The private endpoint connection properties. - :type properties: - ~azure_databricks_management_client.models.PrivateEndpointConnectionProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'PrivateEndpointConnectionProperties'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointConnection, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.properties = kwargs['properties'] - - -class PrivateEndpointConnectionProperties(msrest.serialization.Model): - """The properties of a private endpoint connection. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :param private_endpoint: Private endpoint. - :type private_endpoint: ~azure_databricks_management_client.models.PrivateEndpoint - :param private_link_service_connection_state: Required. Private endpoint connection state. 
- :type private_link_service_connection_state: - ~azure_databricks_management_client.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: Provisioning state of the private endpoint connection. Possible - values include: "Succeeded", "Creating", "Updating", "Deleting", "Failed". - :vartype provisioning_state: str or - ~azure_databricks_management_client.models.PrivateEndpointConnectionProvisioningState - """ - - _validation = { - 'private_link_service_connection_state': {'required': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'private_endpoint': {'key': 'privateEndpoint', 'type': 'PrivateEndpoint'}, - 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointConnectionProperties, self).__init__(**kwargs) - self.private_endpoint = kwargs.get('private_endpoint', None) - self.private_link_service_connection_state = kwargs['private_link_service_connection_state'] - self.provisioning_state = None - - -class PrivateEndpointConnectionsList(msrest.serialization.Model): - """List of private link connections. - - :param value: The list of returned private endpoint connection. - :type value: list[~azure_databricks_management_client.models.PrivateEndpointConnection] - :param next_link: The URL to get the next set of endpoint connections. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateEndpointConnectionsList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class PrivateLinkResourcesList(msrest.serialization.Model): - """The available private link resources for a workspace. - - :param value: The list of available private link resources for a workspace. - :type value: list[~azure_databricks_management_client.models.GroupIdInformation] - :param next_link: The URL to get the next set of private link resources. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[GroupIdInformation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkResourcesList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class PrivateLinkServiceConnectionState(msrest.serialization.Model): - """The current state of a private endpoint connection. - - All required parameters must be populated in order to send to Azure. - - :param status: Required. The status of a private endpoint connection. Possible values include: - "Pending", "Approved", "Rejected", "Disconnected". - :type status: str or - ~azure_databricks_management_client.models.PrivateLinkServiceConnectionStatus - :param description: The description for the current state of a private endpoint connection. - :type description: str - :param action_required: Actions required for a private endpoint connection. 
- :type action_required: str - """ - - _validation = { - 'status': {'required': True}, - } - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'action_required': {'key': 'actionRequired', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) - self.status = kwargs['status'] - self.description = kwargs.get('description', None) - self.action_required = kwargs.get('action_required', None) - - -class Sku(msrest.serialization.Model): - """SKU for the resource. - - All required parameters must be populated in order to send to Azure. - - :param name: Required. The SKU name. - :type name: str - :param tier: The SKU tier. - :type tier: str - """ - - _validation = { - 'name': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Sku, self).__init__(**kwargs) - self.name = kwargs['name'] - self.tier = kwargs.get('tier', None) - - -class SystemData(msrest.serialization.Model): - """Metadata pertaining to creation and last modification of the resource. - - :param created_by: The identity that created the resource. - :type created_by: str - :param created_by_type: The type of identity that created the resource. Possible values - include: "User", "Application", "ManagedIdentity", "Key". - :type created_by_type: str or ~azure_databricks_management_client.models.CreatedByType - :param created_at: The timestamp of resource creation (UTC). - :type created_at: ~datetime.datetime - :param last_modified_by: The identity that last modified the resource. - :type last_modified_by: str - :param last_modified_by_type: The type of identity that last modified the resource. Possible - values include: "User", "Application", "ManagedIdentity", "Key". - :type last_modified_by_type: str or ~azure_databricks_management_client.models.CreatedByType - :param last_modified_at: The timestamp of resource last modification (UTC). - :type last_modified_at: ~datetime.datetime - """ - - _attribute_map = { - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'created_by_type': {'key': 'createdByType', 'type': 'str'}, - 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, - 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, - 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, - 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - super(SystemData, self).__init__(**kwargs) - self.created_by = kwargs.get('created_by', None) - self.created_by_type = kwargs.get('created_by_type', None) - self.created_at = kwargs.get('created_at', None) - self.last_modified_by = kwargs.get('last_modified_by', None) - self.last_modified_by_type = kwargs.get('last_modified_by_type', None) - self.last_modified_at = kwargs.get('last_modified_at', None) - - -class TrackedResource(Resource): - """The resource model definition for a ARM tracked top level resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. 
- :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: Required. The geo-location where the resource lives. - :type location: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(TrackedResource, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.location = kwargs['location'] - - -class VirtualNetworkPeering(msrest.serialization.Model): - """Peerings in a VirtualNetwork resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar name: Name of the virtual network peering resource. - :vartype name: str - :ivar id: Resource ID. - :vartype id: str - :ivar type: type of the virtual network peering resource. - :vartype type: str - :param allow_virtual_network_access: Whether the VMs in the local virtual network space would - be able to access the VMs in remote virtual network space. - :type allow_virtual_network_access: bool - :param allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the local virtual - network will be allowed/disallowed in remote virtual network. - :type allow_forwarded_traffic: bool - :param allow_gateway_transit: If gateway links can be used in remote virtual networking to link - to this virtual network. - :type allow_gateway_transit: bool - :param use_remote_gateways: If remote gateways can be used on this virtual network. If the flag - is set to true, and allowGatewayTransit on remote peering is also true, virtual network will - use gateways of remote virtual network for transit. Only one peering can have this flag set to - true. This flag cannot be set if virtual network already has a gateway. - :type use_remote_gateways: bool - :param databricks_virtual_network: The remote virtual network should be in the same region. See - here to learn more - (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). - :type databricks_virtual_network: - ~azure_databricks_management_client.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork - :param databricks_address_space: The reference to the databricks virtual network address space. - :type databricks_address_space: ~azure_databricks_management_client.models.AddressSpace - :param remote_virtual_network: Required. The remote virtual network should be in the same - region. See here to learn more - (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). - :type remote_virtual_network: - ~azure_databricks_management_client.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork - :param remote_address_space: The reference to the remote virtual network address space. - :type remote_address_space: ~azure_databricks_management_client.models.AddressSpace - :ivar peering_state: The status of the virtual network peering. 
Possible values include: - "Initiated", "Connected", "Disconnected". - :vartype peering_state: str or ~azure_databricks_management_client.models.PeeringState - :ivar provisioning_state: The provisioning state of the virtual network peering resource. - Possible values include: "Succeeded", "Updating", "Deleting", "Failed". - :vartype provisioning_state: str or - ~azure_databricks_management_client.models.PeeringProvisioningState - """ - - _validation = { - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'remote_virtual_network': {'required': True}, - 'peering_state': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'}, - 'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'}, - 'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'}, - 'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'}, - 'databricks_virtual_network': {'key': 'properties.databricksVirtualNetwork', 'type': 'VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork'}, - 'databricks_address_space': {'key': 'properties.databricksAddressSpace', 'type': 'AddressSpace'}, - 'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork'}, - 'remote_address_space': {'key': 'properties.remoteAddressSpace', 'type': 'AddressSpace'}, - 'peering_state': {'key': 'properties.peeringState', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualNetworkPeering, self).__init__(**kwargs) - self.name = None - self.id = None - self.type = None - self.allow_virtual_network_access = kwargs.get('allow_virtual_network_access', None) - self.allow_forwarded_traffic = kwargs.get('allow_forwarded_traffic', None) - self.allow_gateway_transit = kwargs.get('allow_gateway_transit', None) - self.use_remote_gateways = kwargs.get('use_remote_gateways', None) - self.databricks_virtual_network = kwargs.get('databricks_virtual_network', None) - self.databricks_address_space = kwargs.get('databricks_address_space', None) - self.remote_virtual_network = kwargs['remote_virtual_network'] - self.remote_address_space = kwargs.get('remote_address_space', None) - self.peering_state = None - self.provisioning_state = None - - -class VirtualNetworkPeeringList(msrest.serialization.Model): - """Gets all virtual network peerings under a workspace. - - :param value: List of virtual network peerings on workspace. - :type value: list[~azure_databricks_management_client.models.VirtualNetworkPeering] - :param next_link: URL to get the next set of virtual network peering list results if there are - any. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[VirtualNetworkPeering]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualNetworkPeeringList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork(msrest.serialization.Model): - """The remote virtual network should be in the same region. 
See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). - - :param id: The Id of the databricks virtual network. - :type id: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - - -class VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork(msrest.serialization.Model): - """The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). - - :param id: The Id of the remote virtual network. - :type id: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - - -class Workspace(TrackedResource): - """Information about workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: Required. The geo-location where the resource lives. - :type location: str - :param sku: The SKU of the resource. - :type sku: ~azure_databricks_management_client.models.Sku - :ivar system_data: The system metadata relating to this resource. - :vartype system_data: ~azure_databricks_management_client.models.SystemData - :param managed_resource_group_id: Required. The managed resource group Id. - :type managed_resource_group_id: str - :param parameters: The workspace's custom parameters. - :type parameters: ~azure_databricks_management_client.models.WorkspaceCustomParameters - :ivar provisioning_state: The workspace provisioning state. Possible values include: - "Accepted", "Running", "Ready", "Creating", "Created", "Deleting", "Deleted", "Canceled", - "Failed", "Succeeded", "Updating". - :vartype provisioning_state: str or - ~azure_databricks_management_client.models.ProvisioningState - :param ui_definition_uri: The blob URI where the UI definition file is located. - :type ui_definition_uri: str - :param authorizations: The workspace provider authorizations. - :type authorizations: - list[~azure_databricks_management_client.models.WorkspaceProviderAuthorization] - :param created_by: Indicates the Object ID, PUID and Application ID of entity that created the - workspace. - :type created_by: ~azure_databricks_management_client.models.CreatedBy - :param updated_by: Indicates the Object ID, PUID and Application ID of entity that last updated - the workspace. - :type updated_by: ~azure_databricks_management_client.models.CreatedBy - :ivar created_date_time: Specifies the date and time when the workspace is created. 
- :vartype created_date_time: ~datetime.datetime - :ivar workspace_id: The unique identifier of the databricks workspace in databricks control - plane. - :vartype workspace_id: str - :ivar workspace_url: The workspace URL which is of the format - 'adb-{workspaceId}.{random}.azuredatabricks.net'. - :vartype workspace_url: str - :param storage_account_identity: The details of Managed Identity of Storage Account. - :type storage_account_identity: - ~azure_databricks_management_client.models.ManagedIdentityConfiguration - :param encryption: Encryption properties for databricks workspace. - :type encryption: ~azure_databricks_management_client.models.WorkspacePropertiesEncryption - :ivar private_endpoint_connections: Private endpoint connections created on the workspace. - :vartype private_endpoint_connections: - list[~azure_databricks_management_client.models.PrivateEndpointConnection] - :param public_network_access: The network access type for accessing workspace. Set value to - disabled to access workspace only via private link. Possible values include: "Enabled", - "Disabled". - :type public_network_access: str or - ~azure_databricks_management_client.models.PublicNetworkAccess - :param required_nsg_rules: Gets or sets a value indicating whether data plane (clusters) to - control plane communication happen over private endpoint. Supported values are 'AllRules' and - 'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. Possible values - include: "AllRules", "NoAzureDatabricksRules", "NoAzureServiceRules". - :type required_nsg_rules: str or ~azure_databricks_management_client.models.RequiredNsgRules - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'location': {'required': True}, - 'system_data': {'readonly': True}, - 'managed_resource_group_id': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_date_time': {'readonly': True}, - 'workspace_id': {'readonly': True}, - 'workspace_url': {'readonly': True}, - 'private_endpoint_connections': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'managed_resource_group_id': {'key': 'properties.managedResourceGroupId', 'type': 'str'}, - 'parameters': {'key': 'properties.parameters', 'type': 'WorkspaceCustomParameters'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'ui_definition_uri': {'key': 'properties.uiDefinitionUri', 'type': 'str'}, - 'authorizations': {'key': 'properties.authorizations', 'type': '[WorkspaceProviderAuthorization]'}, - 'created_by': {'key': 'properties.createdBy', 'type': 'CreatedBy'}, - 'updated_by': {'key': 'properties.updatedBy', 'type': 'CreatedBy'}, - 'created_date_time': {'key': 'properties.createdDateTime', 'type': 'iso-8601'}, - 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, - 'workspace_url': {'key': 'properties.workspaceUrl', 'type': 'str'}, - 'storage_account_identity': {'key': 'properties.storageAccountIdentity', 'type': 'ManagedIdentityConfiguration'}, - 'encryption': {'key': 'properties.encryption', 'type': 'WorkspacePropertiesEncryption'}, - 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': 
'[PrivateEndpointConnection]'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'required_nsg_rules': {'key': 'properties.requiredNsgRules', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(Workspace, self).__init__(**kwargs) - self.sku = kwargs.get('sku', None) - self.system_data = None - self.managed_resource_group_id = kwargs['managed_resource_group_id'] - self.parameters = kwargs.get('parameters', None) - self.provisioning_state = None - self.ui_definition_uri = kwargs.get('ui_definition_uri', None) - self.authorizations = kwargs.get('authorizations', None) - self.created_by = kwargs.get('created_by', None) - self.updated_by = kwargs.get('updated_by', None) - self.created_date_time = None - self.workspace_id = None - self.workspace_url = None - self.storage_account_identity = kwargs.get('storage_account_identity', None) - self.encryption = kwargs.get('encryption', None) - self.private_endpoint_connections = None - self.public_network_access = kwargs.get('public_network_access', None) - self.required_nsg_rules = kwargs.get('required_nsg_rules', None) - - -class WorkspaceCustomBooleanParameter(msrest.serialization.Model): - """The value which should be used for this field. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", - "String". - :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType - :param value: Required. The value which should be used for this field. - :type value: bool - """ - - _validation = { - 'type': {'readonly': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceCustomBooleanParameter, self).__init__(**kwargs) - self.type = None - self.value = kwargs['value'] - - -class WorkspaceCustomObjectParameter(msrest.serialization.Model): - """The value which should be used for this field. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", - "String". - :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType - :param value: Required. The value which should be used for this field. - :type value: any - """ - - _validation = { - 'type': {'readonly': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceCustomObjectParameter, self).__init__(**kwargs) - self.type = None - self.value = kwargs['value'] - - -class WorkspaceCustomParameters(msrest.serialization.Model): - """Custom Parameters used for Cluster Creation. - - Variables are only populated by the server, and will be ignored when sending a request. - - :param aml_workspace_id: The ID of a Azure Machine Learning workspace to link with Databricks - workspace. - :type aml_workspace_id: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param custom_virtual_network_id: The ID of a Virtual Network where this Databricks Cluster - should be created. 
- :type custom_virtual_network_id: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param custom_public_subnet_name: The name of a Public Subnet within the Virtual Network. - :type custom_public_subnet_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param custom_private_subnet_name: The name of the Private Subnet within the Virtual Network. - :type custom_private_subnet_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param enable_no_public_ip: Should the Public IP be Disabled?. - :type enable_no_public_ip: - ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter - :param load_balancer_backend_pool_name: Name of the outbound Load Balancer Backend Pool for - Secure Cluster Connectivity (No Public IP). - :type load_balancer_backend_pool_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param load_balancer_id: Resource URI of Outbound Load balancer for Secure Cluster Connectivity - (No Public IP) workspace. - :type load_balancer_id: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param nat_gateway_name: Name of the NAT gateway for Secure Cluster Connectivity (No Public IP) - workspace subnets. - :type nat_gateway_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param public_ip_name: Name of the Public IP for No Public IP workspace with managed vNet. - :type public_ip_name: ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param prepare_encryption: Prepare the workspace for encryption. Enables the Managed Identity - for managed storage account. - :type prepare_encryption: - ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter - :param encryption: Contains the encryption details for Customer-Managed Key (CMK) enabled - workspace. - :type encryption: ~azure_databricks_management_client.models.WorkspaceEncryptionParameter - :param require_infrastructure_encryption: A boolean indicating whether or not the DBFS root - file system will be enabled with secondary layer of encryption with platform managed keys for - data at rest. - :type require_infrastructure_encryption: - ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter - :param storage_account_name: Default DBFS storage account name. - :type storage_account_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param storage_account_sku_name: Storage account SKU name, ex: Standard_GRS, Standard_LRS. - Refer https://aka.ms/storageskus for valid inputs. - :type storage_account_sku_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param vnet_address_prefix: Address prefix for Managed virtual network. Default value for this - input is 10.139. - :type vnet_address_prefix: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :ivar resource_tags: Tags applied to resources under Managed resource group. These can be - updated by updating tags at workspace level. 
- :vartype resource_tags: - ~azure_databricks_management_client.models.WorkspaceCustomObjectParameter - """ - - _validation = { - 'resource_tags': {'readonly': True}, - } - - _attribute_map = { - 'aml_workspace_id': {'key': 'amlWorkspaceId', 'type': 'WorkspaceCustomStringParameter'}, - 'custom_virtual_network_id': {'key': 'customVirtualNetworkId', 'type': 'WorkspaceCustomStringParameter'}, - 'custom_public_subnet_name': {'key': 'customPublicSubnetName', 'type': 'WorkspaceCustomStringParameter'}, - 'custom_private_subnet_name': {'key': 'customPrivateSubnetName', 'type': 'WorkspaceCustomStringParameter'}, - 'enable_no_public_ip': {'key': 'enableNoPublicIp', 'type': 'WorkspaceCustomBooleanParameter'}, - 'load_balancer_backend_pool_name': {'key': 'loadBalancerBackendPoolName', 'type': 'WorkspaceCustomStringParameter'}, - 'load_balancer_id': {'key': 'loadBalancerId', 'type': 'WorkspaceCustomStringParameter'}, - 'nat_gateway_name': {'key': 'natGatewayName', 'type': 'WorkspaceCustomStringParameter'}, - 'public_ip_name': {'key': 'publicIpName', 'type': 'WorkspaceCustomStringParameter'}, - 'prepare_encryption': {'key': 'prepareEncryption', 'type': 'WorkspaceCustomBooleanParameter'}, - 'encryption': {'key': 'encryption', 'type': 'WorkspaceEncryptionParameter'}, - 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'WorkspaceCustomBooleanParameter'}, - 'storage_account_name': {'key': 'storageAccountName', 'type': 'WorkspaceCustomStringParameter'}, - 'storage_account_sku_name': {'key': 'storageAccountSkuName', 'type': 'WorkspaceCustomStringParameter'}, - 'vnet_address_prefix': {'key': 'vnetAddressPrefix', 'type': 'WorkspaceCustomStringParameter'}, - 'resource_tags': {'key': 'resourceTags', 'type': 'WorkspaceCustomObjectParameter'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceCustomParameters, self).__init__(**kwargs) - self.aml_workspace_id = kwargs.get('aml_workspace_id', None) - self.custom_virtual_network_id = kwargs.get('custom_virtual_network_id', None) - self.custom_public_subnet_name = kwargs.get('custom_public_subnet_name', None) - self.custom_private_subnet_name = kwargs.get('custom_private_subnet_name', None) - self.enable_no_public_ip = kwargs.get('enable_no_public_ip', None) - self.load_balancer_backend_pool_name = kwargs.get('load_balancer_backend_pool_name', None) - self.load_balancer_id = kwargs.get('load_balancer_id', None) - self.nat_gateway_name = kwargs.get('nat_gateway_name', None) - self.public_ip_name = kwargs.get('public_ip_name', None) - self.prepare_encryption = kwargs.get('prepare_encryption', None) - self.encryption = kwargs.get('encryption', None) - self.require_infrastructure_encryption = kwargs.get('require_infrastructure_encryption', None) - self.storage_account_name = kwargs.get('storage_account_name', None) - self.storage_account_sku_name = kwargs.get('storage_account_sku_name', None) - self.vnet_address_prefix = kwargs.get('vnet_address_prefix', None) - self.resource_tags = None - - -class WorkspaceCustomStringParameter(msrest.serialization.Model): - """The Value. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", - "String". - :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType - :param value: Required. The value which should be used for this field. 
- :type value: str - """ - - _validation = { - 'type': {'readonly': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceCustomStringParameter, self).__init__(**kwargs) - self.type = None - self.value = kwargs['value'] - - -class WorkspaceEncryptionParameter(msrest.serialization.Model): - """The object that contains details of encryption used on the workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", - "String". - :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType - :param value: The value which should be used for this field. - :type value: ~azure_databricks_management_client.models.Encryption - """ - - _validation = { - 'type': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'Encryption'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceEncryptionParameter, self).__init__(**kwargs) - self.type = None - self.value = kwargs.get('value', None) - - -class WorkspaceListResult(msrest.serialization.Model): - """List of workspaces. - - :param value: The array of workspaces. - :type value: list[~azure_databricks_management_client.models.Workspace] - :param next_link: The URL to use for getting the next set of results. - :type next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Workspace]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class WorkspacePropertiesEncryption(msrest.serialization.Model): - """Encryption properties for databricks workspace. - - All required parameters must be populated in order to send to Azure. - - :param entities: Required. Encryption entities definition for the workspace. - :type entities: ~azure_databricks_management_client.models.EncryptionEntitiesDefinition - """ - - _validation = { - 'entities': {'required': True}, - } - - _attribute_map = { - 'entities': {'key': 'entities', 'type': 'EncryptionEntitiesDefinition'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspacePropertiesEncryption, self).__init__(**kwargs) - self.entities = kwargs['entities'] - - -class WorkspaceProviderAuthorization(msrest.serialization.Model): - """The workspace provider authorization. - - All required parameters must be populated in order to send to Azure. - - :param principal_id: Required. The provider's principal identifier. This is the identity that - the provider will use to call ARM to manage the workspace resources. - :type principal_id: str - :param role_definition_id: Required. The provider's role definition identifier. This role will - define all the permissions that the provider must have on the workspace's container resource - group. This role definition cannot have permission to delete the resource group. 
- :type role_definition_id: str - """ - - _validation = { - 'principal_id': {'required': True}, - 'role_definition_id': {'required': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'role_definition_id': {'key': 'roleDefinitionId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceProviderAuthorization, self).__init__(**kwargs) - self.principal_id = kwargs['principal_id'] - self.role_definition_id = kwargs['role_definition_id'] - - -class WorkspaceUpdate(msrest.serialization.Model): - """An update to a workspace. - - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - super(WorkspaceUpdate, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py index e8e7c355d599..79901cc3e9b5 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_models_py3.py @@ -1,4 +1,5 @@ # coding=utf-8 +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. @@ -7,37 +8,275 @@ # -------------------------------------------------------------------------- import datetime -from typing import Any, Dict, List, Optional, Union +import sys +from typing import Any, Dict, List, Optional, TYPE_CHECKING, Union -from azure.core.exceptions import HttpResponseError -import msrest.serialization +from .. import _serialization -from ._azure_databricks_management_client_enums import * +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from .. import models as _models +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class AddressSpace(msrest.serialization.Model): - """AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network. - :param address_prefixes: A list of address blocks reserved for this virtual network in CIDR - notation. - :type address_prefixes: list[str] +class Resource(_serialization.Model): + """The core properties of ARM resources. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: Fully qualified resource Id for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or + Microsoft.Storage/storageAccounts. 
+    :vartype type: str
+    """
+
+    _validation = {
+        "id": {"readonly": True},
+        "name": {"readonly": True},
+        "type": {"readonly": True},
+    }
+
+    _attribute_map = {
+        "id": {"key": "id", "type": "str"},
+        "name": {"key": "name", "type": "str"},
+        "type": {"key": "type", "type": "str"},
+    }
+
+    def __init__(self, **kwargs):
+        """ """
+        super().__init__(**kwargs)
+        self.id = None
+        self.name = None
+        self.type = None
+
+
+class TrackedResource(Resource):
+    """The resource model definition for an ARM tracked top level resource.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource Id for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
+     Microsoft.Storage/storageAccounts.
+    :vartype type: str
+    :ivar tags: Resource tags.
+    :vartype tags: dict[str, str]
+    :ivar location: The geo-location where the resource lives. Required.
+    :vartype location: str
+    """
+
+    _validation = {
+        "id": {"readonly": True},
+        "name": {"readonly": True},
+        "type": {"readonly": True},
+        "location": {"required": True},
+    }
+
+    _attribute_map = {
+        "id": {"key": "id", "type": "str"},
+        "name": {"key": "name", "type": "str"},
+        "type": {"key": "type", "type": "str"},
+        "tags": {"key": "tags", "type": "{str}"},
+        "location": {"key": "location", "type": "str"},
+    }
+
+    def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs):
+        """
+        :keyword tags: Resource tags.
+        :paramtype tags: dict[str, str]
+        :keyword location: The geo-location where the resource lives. Required.
+        :paramtype location: str
+        """
+        super().__init__(**kwargs)
+        self.tags = tags
+        self.location = location
+
+
+class AccessConnector(TrackedResource):
+    """Information about azure databricks accessConnector.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar id: Fully qualified resource Id for the resource. Ex -
+     /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.
+    :vartype id: str
+    :ivar name: The name of the resource.
+    :vartype name: str
+    :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or
+     Microsoft.Storage/storageAccounts.
+    :vartype type: str
+    :ivar tags: Resource tags.
+    :vartype tags: dict[str, str]
+    :ivar location: The geo-location where the resource lives. Required.
+    :vartype location: str
+    :ivar identity: Identity for the resource.
+    :vartype identity: ~azure.mgmt.databricks.models.IdentityData
+    :ivar properties: Azure Databricks accessConnector properties.
+ :vartype properties: ~azure.mgmt.databricks.models.AccessConnectorProperties """ + _validation = { + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "location": {"required": True}, + } + _attribute_map = { - 'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityData"}, + "properties": {"key": "properties", "type": "AccessConnectorProperties"}, } def __init__( self, *, - address_prefixes: Optional[List[str]] = None, + location: str, + tags: Optional[Dict[str, str]] = None, + identity: Optional["_models.IdentityData"] = None, + properties: Optional["_models.AccessConnectorProperties"] = None, **kwargs ): - super(AddressSpace, self).__init__(**kwargs) + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword identity: Identity for the resource. + :paramtype identity: ~azure.mgmt.databricks.models.IdentityData + :keyword properties: Azure Databricks accessConnector properties. + :paramtype properties: ~azure.mgmt.databricks.models.AccessConnectorProperties + """ + super().__init__(tags=tags, location=location, **kwargs) + self.identity = identity + self.properties = properties + + +class AccessConnectorListResult(_serialization.Model): + """List of azure databricks accessConnector. + + :ivar value: The array of azure databricks accessConnector. + :vartype value: list[~azure.mgmt.databricks.models.AccessConnector] + :ivar next_link: The URL to use for getting the next set of results. + :vartype next_link: str + """ + + _attribute_map = { + "value": {"key": "value", "type": "[AccessConnector]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__( + self, *, value: Optional[List["_models.AccessConnector"]] = None, next_link: Optional[str] = None, **kwargs + ): + """ + :keyword value: The array of azure databricks accessConnector. + :paramtype value: list[~azure.mgmt.databricks.models.AccessConnector] + :keyword next_link: The URL to use for getting the next set of results. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class AccessConnectorProperties(_serialization.Model): + """AccessConnectorProperties. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provisioning_state: Provisioning status of the accessConnector. Known values are: + "Accepted", "Running", "Ready", "Creating", "Created", "Deleting", "Deleted", "Canceled", + "Failed", "Succeeded", and "Updating". + :vartype provisioning_state: str or ~azure.mgmt.databricks.models.ProvisioningState + """ + + _validation = { + "provisioning_state": {"readonly": True}, + } + + _attribute_map = { + "provisioning_state": {"key": "provisioningState", "type": "str"}, + } + + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) + self.provisioning_state = None + + +class AccessConnectorUpdate(_serialization.Model): + """An update to an azure databricks accessConnector. + + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar identity: The identity of the resource. 
+ :vartype identity: ~azure.mgmt.databricks.models.IdentityData + """ + + _attribute_map = { + "tags": {"key": "tags", "type": "{str}"}, + "identity": {"key": "identity", "type": "IdentityData"}, + } + + def __init__( + self, *, tags: Optional[Dict[str, str]] = None, identity: Optional["_models.IdentityData"] = None, **kwargs + ): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword identity: The identity of the resource. + :paramtype identity: ~azure.mgmt.databricks.models.IdentityData + """ + super().__init__(**kwargs) + self.tags = tags + self.identity = identity + + +class AddressSpace(_serialization.Model): + """AddressSpace contains an array of IP address ranges that can be used by subnets of the virtual network. + + :ivar address_prefixes: A list of address blocks reserved for this virtual network in CIDR + notation. + :vartype address_prefixes: list[str] + """ + + _attribute_map = { + "address_prefixes": {"key": "addressPrefixes", "type": "[str]"}, + } + + def __init__(self, *, address_prefixes: Optional[List[str]] = None, **kwargs): + """ + :keyword address_prefixes: A list of address blocks reserved for this virtual network in CIDR + notation. + :paramtype address_prefixes: list[str] + """ + super().__init__(**kwargs) self.address_prefixes = address_prefixes -class CreatedBy(msrest.serialization.Model): +class CreatedBy(_serialization.Model): """Provides details of the entity that created/updated the workspace. Variables are only populated by the server, and will be ignored when sending a request. @@ -52,222 +291,331 @@ class CreatedBy(msrest.serialization.Model): """ _validation = { - 'oid': {'readonly': True}, - 'puid': {'readonly': True}, - 'application_id': {'readonly': True}, + "oid": {"readonly": True}, + "puid": {"readonly": True}, + "application_id": {"readonly": True}, } _attribute_map = { - 'oid': {'key': 'oid', 'type': 'str'}, - 'puid': {'key': 'puid', 'type': 'str'}, - 'application_id': {'key': 'applicationId', 'type': 'str'}, + "oid": {"key": "oid", "type": "str"}, + "puid": {"key": "puid", "type": "str"}, + "application_id": {"key": "applicationId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(CreatedBy, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.oid = None self.puid = None self.application_id = None -class Encryption(msrest.serialization.Model): +class Encryption(_serialization.Model): """The object that contains details of encryption used on the workspace. - :param key_source: The encryption keySource (provider). Possible values (case-insensitive): - Default, Microsoft.Keyvault. Possible values include: "Default", "Microsoft.Keyvault". Default - value: "Default". - :type key_source: str or ~azure_databricks_management_client.models.KeySource - :param key_name: The name of KeyVault key. - :type key_name: str - :param key_version: The version of KeyVault key. - :type key_version: str - :param key_vault_uri: The Uri of KeyVault. - :type key_vault_uri: str + :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): + Default, Microsoft.Keyvault. Known values are: "Default" and "Microsoft.Keyvault". + :vartype key_source: str or ~azure.mgmt.databricks.models.KeySource + :ivar key_name: The name of KeyVault key. + :vartype key_name: str + :ivar key_version: The version of KeyVault key. + :vartype key_version: str + :ivar key_vault_uri: The Uri of KeyVault. 
+ :vartype key_vault_uri: str """ _attribute_map = { - 'key_source': {'key': 'keySource', 'type': 'str'}, - 'key_name': {'key': 'KeyName', 'type': 'str'}, - 'key_version': {'key': 'keyversion', 'type': 'str'}, - 'key_vault_uri': {'key': 'keyvaulturi', 'type': 'str'}, + "key_source": {"key": "keySource", "type": "str"}, + "key_name": {"key": "KeyName", "type": "str"}, + "key_version": {"key": "keyversion", "type": "str"}, + "key_vault_uri": {"key": "keyvaulturi", "type": "str"}, } def __init__( self, *, - key_source: Optional[Union[str, "KeySource"]] = "Default", + key_source: Union[str, "_models.KeySource"] = "Default", key_name: Optional[str] = None, key_version: Optional[str] = None, key_vault_uri: Optional[str] = None, **kwargs ): - super(Encryption, self).__init__(**kwargs) + """ + :keyword key_source: The encryption keySource (provider). Possible values (case-insensitive): + Default, Microsoft.Keyvault. Known values are: "Default" and "Microsoft.Keyvault". + :paramtype key_source: str or ~azure.mgmt.databricks.models.KeySource + :keyword key_name: The name of KeyVault key. + :paramtype key_name: str + :keyword key_version: The version of KeyVault key. + :paramtype key_version: str + :keyword key_vault_uri: The Uri of KeyVault. + :paramtype key_vault_uri: str + """ + super().__init__(**kwargs) self.key_source = key_source self.key_name = key_name self.key_version = key_version self.key_vault_uri = key_vault_uri -class EncryptionEntitiesDefinition(msrest.serialization.Model): +class EncryptionEntitiesDefinition(_serialization.Model): """Encryption entities for databricks workspace resource. - :param managed_services: Encryption properties for the databricks managed services. - :type managed_services: ~azure_databricks_management_client.models.EncryptionV2 + :ivar managed_services: Encryption properties for the databricks managed services. + :vartype managed_services: ~azure.mgmt.databricks.models.EncryptionV2 + :ivar managed_disk: Encryption properties for the databricks managed disks. + :vartype managed_disk: ~azure.mgmt.databricks.models.ManagedDiskEncryption """ _attribute_map = { - 'managed_services': {'key': 'managedServices', 'type': 'EncryptionV2'}, + "managed_services": {"key": "managedServices", "type": "EncryptionV2"}, + "managed_disk": {"key": "managedDisk", "type": "ManagedDiskEncryption"}, } def __init__( self, *, - managed_services: Optional["EncryptionV2"] = None, + managed_services: Optional["_models.EncryptionV2"] = None, + managed_disk: Optional["_models.ManagedDiskEncryption"] = None, **kwargs ): - super(EncryptionEntitiesDefinition, self).__init__(**kwargs) + """ + :keyword managed_services: Encryption properties for the databricks managed services. + :paramtype managed_services: ~azure.mgmt.databricks.models.EncryptionV2 + :keyword managed_disk: Encryption properties for the databricks managed disks. + :paramtype managed_disk: ~azure.mgmt.databricks.models.ManagedDiskEncryption + """ + super().__init__(**kwargs) self.managed_services = managed_services + self.managed_disk = managed_disk -class EncryptionV2(msrest.serialization.Model): +class EncryptionV2(_serialization.Model): """The object that contains details of encryption used on the workspace. All required parameters must be populated in order to send to Azure. - :param key_source: Required. The encryption keySource (provider). Possible values - (case-insensitive): Microsoft.Keyvault. Possible values include: "Microsoft.Keyvault". 
- :type key_source: str or ~azure_databricks_management_client.models.EncryptionKeySource - :param key_vault_properties: Key Vault input properties for encryption. - :type key_vault_properties: - ~azure_databricks_management_client.models.EncryptionV2KeyVaultProperties + :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): + Microsoft.Keyvault. Required. "Microsoft.Keyvault" + :vartype key_source: str or ~azure.mgmt.databricks.models.EncryptionKeySource + :ivar key_vault_properties: Key Vault input properties for encryption. + :vartype key_vault_properties: ~azure.mgmt.databricks.models.EncryptionV2KeyVaultProperties """ _validation = { - 'key_source': {'required': True}, + "key_source": {"required": True}, } _attribute_map = { - 'key_source': {'key': 'keySource', 'type': 'str'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionV2KeyVaultProperties'}, + "key_source": {"key": "keySource", "type": "str"}, + "key_vault_properties": {"key": "keyVaultProperties", "type": "EncryptionV2KeyVaultProperties"}, } def __init__( self, *, - key_source: Union[str, "EncryptionKeySource"], - key_vault_properties: Optional["EncryptionV2KeyVaultProperties"] = None, + key_source: Union[str, "_models.EncryptionKeySource"], + key_vault_properties: Optional["_models.EncryptionV2KeyVaultProperties"] = None, **kwargs ): - super(EncryptionV2, self).__init__(**kwargs) + """ + :keyword key_source: The encryption keySource (provider). Possible values (case-insensitive): + Microsoft.Keyvault. Required. "Microsoft.Keyvault" + :paramtype key_source: str or ~azure.mgmt.databricks.models.EncryptionKeySource + :keyword key_vault_properties: Key Vault input properties for encryption. + :paramtype key_vault_properties: ~azure.mgmt.databricks.models.EncryptionV2KeyVaultProperties + """ + super().__init__(**kwargs) self.key_source = key_source self.key_vault_properties = key_vault_properties -class EncryptionV2KeyVaultProperties(msrest.serialization.Model): +class EncryptionV2KeyVaultProperties(_serialization.Model): """Key Vault input properties for encryption. All required parameters must be populated in order to send to Azure. - :param key_vault_uri: Required. The Uri of KeyVault. - :type key_vault_uri: str - :param key_name: Required. The name of KeyVault key. - :type key_name: str - :param key_version: Required. The version of KeyVault key. - :type key_version: str + :ivar key_vault_uri: The Uri of KeyVault. Required. + :vartype key_vault_uri: str + :ivar key_name: The name of KeyVault key. Required. + :vartype key_name: str + :ivar key_version: The version of KeyVault key. Required. + :vartype key_version: str """ _validation = { - 'key_vault_uri': {'required': True}, - 'key_name': {'required': True}, - 'key_version': {'required': True}, + "key_vault_uri": {"required": True}, + "key_name": {"required": True}, + "key_version": {"required": True}, } _attribute_map = { - 'key_vault_uri': {'key': 'keyVaultUri', 'type': 'str'}, - 'key_name': {'key': 'keyName', 'type': 'str'}, - 'key_version': {'key': 'keyVersion', 'type': 'str'}, + "key_vault_uri": {"key": "keyVaultUri", "type": "str"}, + "key_name": {"key": "keyName", "type": "str"}, + "key_version": {"key": "keyVersion", "type": "str"}, + } + + def __init__(self, *, key_vault_uri: str, key_name: str, key_version: str, **kwargs): + """ + :keyword key_vault_uri: The Uri of KeyVault. Required. + :paramtype key_vault_uri: str + :keyword key_name: The name of KeyVault key. Required. 
+        :paramtype key_name: str
+        :keyword key_version: The version of KeyVault key. Required.
+        :paramtype key_version: str
+        """
+        super().__init__(**kwargs)
+        self.key_vault_uri = key_vault_uri
+        self.key_name = key_name
+        self.key_version = key_version
+
+
+class EndpointDependency(_serialization.Model):
+    """A domain name or IP address the Workspace is reaching out to.
+
+    :ivar domain_name: The domain name of the dependency.
+    :vartype domain_name: str
+    :ivar endpoint_details: The Ports used when connecting to domainName.
+    :vartype endpoint_details: list[~azure.mgmt.databricks.models.EndpointDetail]
+    """
+
+    _attribute_map = {
+        "domain_name": {"key": "domainName", "type": "str"},
+        "endpoint_details": {"key": "endpointDetails", "type": "[EndpointDetail]"},
+    }
+
+    def __init__(
+        self,
+        *,
+        domain_name: Optional[str] = None,
+        endpoint_details: Optional[List["_models.EndpointDetail"]] = None,
+        **kwargs
+    ):
+        """
+        :keyword domain_name: The domain name of the dependency.
+        :paramtype domain_name: str
+        :keyword endpoint_details: The Ports used when connecting to domainName.
+        :paramtype endpoint_details: list[~azure.mgmt.databricks.models.EndpointDetail]
+        """
+        super().__init__(**kwargs)
+        self.domain_name = domain_name
+        self.endpoint_details = endpoint_details
+
+
+class EndpointDetail(_serialization.Model):
+    """Connection information from the Workspace to a single endpoint.
+
+    :ivar ip_address: An IP Address that Domain Name currently resolves to.
+    :vartype ip_address: str
+    :ivar port: The port an endpoint is connected to.
+    :vartype port: int
+    :ivar latency: The time in milliseconds it takes for the connection to be created from the
+     Workspace to this IpAddress at this Port.
+    :vartype latency: float
+    :ivar is_accessible: Whether it is possible to create a connection from the Workspace to this
+     IpAddress at this Port.
+    :vartype is_accessible: bool
+    """
+    _attribute_map = {
+        "ip_address": {"key": "ipAddress", "type": "str"},
+        "port": {"key": "port", "type": "int"},
+        "latency": {"key": "latency", "type": "float"},
+        "is_accessible": {"key": "isAccessible", "type": "bool"},
+    }
-class ErrorDetail(msrest.serialization.Model):
+    def __init__(
+        self,
+        *,
+        ip_address: Optional[str] = None,
+        port: Optional[int] = None,
+        latency: Optional[float] = None,
+        is_accessible: Optional[bool] = None,
+        **kwargs
+    ):
+        """
+        :keyword ip_address: An IP Address that Domain Name currently resolves to.
+        :paramtype ip_address: str
+        :keyword port: The port an endpoint is connected to.
+        :paramtype port: int
+        :keyword latency: The time in milliseconds it takes for the connection to be created from the
+         Workspace to this IpAddress at this Port.
+        :paramtype latency: float
+        :keyword is_accessible: Whether it is possible to create a connection from the Workspace to
+         this IpAddress at this Port.
+        :paramtype is_accessible: bool
+        """
+        super().__init__(**kwargs)
+        self.ip_address = ip_address
+        self.port = port
+        self.latency = latency
+        self.is_accessible = is_accessible
+
+
+class ErrorDetail(_serialization.Model):
     """Error details.
 
     All required parameters must be populated in order to send to Azure.
 
-    :param code: Required. The error's code.
-    :type code: str
-    :param message: Required. A human readable error message.
- :type message: str - :param target: Indicates which property in the request is responsible for the error. - :type target: str + :ivar code: The error's code. Required. + :vartype code: str + :ivar message: A human readable error message. Required. + :vartype message: str + :ivar target: Indicates which property in the request is responsible for the error. + :vartype target: str """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + "code": {"required": True}, + "message": {"required": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, } - def __init__( - self, - *, - code: str, - message: str, - target: Optional[str] = None, - **kwargs - ): - super(ErrorDetail, self).__init__(**kwargs) + def __init__(self, *, code: str, message: str, target: Optional[str] = None, **kwargs): + """ + :keyword code: The error's code. Required. + :paramtype code: str + :keyword message: A human readable error message. Required. + :paramtype message: str + :keyword target: Indicates which property in the request is responsible for the error. + :paramtype target: str + """ + super().__init__(**kwargs) self.code = code self.message = message self.target = target -class ErrorInfo(msrest.serialization.Model): +class ErrorInfo(_serialization.Model): """The code and message for an error. All required parameters must be populated in order to send to Azure. - :param code: Required. A machine readable error code. - :type code: str - :param message: Required. A human readable error message. - :type message: str - :param details: error details. - :type details: list[~azure_databricks_management_client.models.ErrorDetail] - :param innererror: Inner error details if they exist. - :type innererror: str + :ivar code: A machine readable error code. Required. + :vartype code: str + :ivar message: A human readable error message. Required. + :vartype message: str + :ivar details: error details. + :vartype details: list[~azure.mgmt.databricks.models.ErrorDetail] + :ivar innererror: Inner error details if they exist. + :vartype innererror: str """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + "code": {"required": True}, + "message": {"required": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - 'innererror': {'key': 'innererror', 'type': 'str'}, + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetail]"}, + "innererror": {"key": "innererror", "type": "str"}, } def __init__( @@ -275,49 +623,60 @@ def __init__( *, code: str, message: str, - details: Optional[List["ErrorDetail"]] = None, + details: Optional[List["_models.ErrorDetail"]] = None, innererror: Optional[str] = None, **kwargs ): - super(ErrorInfo, self).__init__(**kwargs) + """ + :keyword code: A machine readable error code. Required. + :paramtype code: str + :keyword message: A human readable error message. Required. + :paramtype message: str + :keyword details: error details. + :paramtype details: list[~azure.mgmt.databricks.models.ErrorDetail] + :keyword innererror: Inner error details if they exist. 
+ :paramtype innererror: str + """ + super().__init__(**kwargs) self.code = code self.message = message self.details = details self.innererror = innererror -class ErrorResponse(msrest.serialization.Model): +class ErrorResponse(_serialization.Model): """Contains details when the response code indicates an error. All required parameters must be populated in order to send to Azure. - :param error: Required. The error details. - :type error: ~azure_databricks_management_client.models.ErrorInfo + :ivar error: The error details. Required. + :vartype error: ~azure.mgmt.databricks.models.ErrorInfo """ _validation = { - 'error': {'required': True}, + "error": {"required": True}, } _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorInfo'}, + "error": {"key": "error", "type": "ErrorInfo"}, } - def __init__( - self, - *, - error: "ErrorInfo", - **kwargs - ): - super(ErrorResponse, self).__init__(**kwargs) + def __init__(self, *, error: "_models.ErrorInfo", **kwargs): + """ + :keyword error: The error details. Required. + :paramtype error: ~azure.mgmt.databricks.models.ErrorInfo + """ + super().__init__(**kwargs) self.error = error -class Resource(msrest.serialization.Model): - """The core properties of ARM resources. +class GroupIdInformation(Resource): + """The group information for creating a private endpoint on a workspace. Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. + :ivar id: Fully qualified resource Id for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. :vartype id: str @@ -326,105 +685,203 @@ class Resource(msrest.serialization.Model): :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. :vartype type: str + :ivar properties: The group id properties. Required. + :vartype properties: ~azure.mgmt.databricks.models.GroupIdInformationProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "properties": {"key": "properties", "type": "GroupIdInformationProperties"}, + } + + def __init__(self, *, properties: "_models.GroupIdInformationProperties", **kwargs): + """ + :keyword properties: The group id properties. Required. + :paramtype properties: ~azure.mgmt.databricks.models.GroupIdInformationProperties + """ + super().__init__(**kwargs) + self.properties = properties + + +class GroupIdInformationProperties(_serialization.Model): + """The properties for a group information object. + + :ivar group_id: The group id. + :vartype group_id: str + :ivar required_members: The required members for a specific group id. + :vartype required_members: list[str] + :ivar required_zone_names: The required DNS zones for a specific group id. 
+ :vartype required_zone_names: list[str] + """ + + _attribute_map = { + "group_id": {"key": "groupId", "type": "str"}, + "required_members": {"key": "requiredMembers", "type": "[str]"}, + "required_zone_names": {"key": "requiredZoneNames", "type": "[str]"}, } def __init__( self, + *, + group_id: Optional[str] = None, + required_members: Optional[List[str]] = None, + required_zone_names: Optional[List[str]] = None, **kwargs ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None + """ + :keyword group_id: The group id. + :paramtype group_id: str + :keyword required_members: The required members for a specific group id. + :paramtype required_members: list[str] + :keyword required_zone_names: The required DNS zones for a specific group id. + :paramtype required_zone_names: list[str] + """ + super().__init__(**kwargs) + self.group_id = group_id + self.required_members = required_members + self.required_zone_names = required_zone_names -class GroupIdInformation(Resource): - """The group information for creating a private endpoint on a workspace. +class IdentityData(_serialization.Model): + """Identity for the resource. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - :param properties: Required. The group id properties. - :type properties: ~azure_databricks_management_client.models.GroupIdInformationProperties + :ivar principal_id: The principal ID of resource identity. + :vartype principal_id: str + :ivar tenant_id: The tenant ID of resource. + :vartype tenant_id: str + :ivar type: The identity type. Required. Known values are: "None" and "SystemAssigned". + :vartype type: str or ~azure.mgmt.databricks.models.IdentityType """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'properties': {'required': True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'GroupIdInformationProperties'}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - *, - properties: "GroupIdInformationProperties", - **kwargs - ): - super(GroupIdInformation, self).__init__(**kwargs) - self.properties = properties + def __init__(self, *, type: Union[str, "_models.IdentityType"], **kwargs): + """ + :keyword type: The identity type. Required. Known values are: "None" and "SystemAssigned". + :paramtype type: str or ~azure.mgmt.databricks.models.IdentityType + """ + super().__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + self.type = type -class GroupIdInformationProperties(msrest.serialization.Model): - """The properties for a group information object. 
+class ManagedDiskEncryption(_serialization.Model): + """The object that contains details of encryption used on the workspace. + + All required parameters must be populated in order to send to Azure. - :param group_id: The group id. - :type group_id: str - :param required_members: The required members for a specific group id. - :type required_members: list[str] - :param required_zone_names: The required DNS zones for a specific group id. - :type required_zone_names: list[str] + :ivar key_source: The encryption keySource (provider). Possible values (case-insensitive): + Microsoft.Keyvault. Required. "Microsoft.Keyvault" + :vartype key_source: str or ~azure.mgmt.databricks.models.EncryptionKeySource + :ivar key_vault_properties: Key Vault input properties for encryption. Required. + :vartype key_vault_properties: + ~azure.mgmt.databricks.models.ManagedDiskEncryptionKeyVaultProperties + :ivar rotation_to_latest_key_version_enabled: Indicate whether the latest key version should be + automatically used for Managed Disk Encryption. + :vartype rotation_to_latest_key_version_enabled: bool """ + _validation = { + "key_source": {"required": True}, + "key_vault_properties": {"required": True}, + } + _attribute_map = { - 'group_id': {'key': 'groupId', 'type': 'str'}, - 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + "key_source": {"key": "keySource", "type": "str"}, + "key_vault_properties": {"key": "keyVaultProperties", "type": "ManagedDiskEncryptionKeyVaultProperties"}, + "rotation_to_latest_key_version_enabled": {"key": "rotationToLatestKeyVersionEnabled", "type": "bool"}, } def __init__( self, *, - group_id: Optional[str] = None, - required_members: Optional[List[str]] = None, - required_zone_names: Optional[List[str]] = None, + key_source: Union[str, "_models.EncryptionKeySource"], + key_vault_properties: "_models.ManagedDiskEncryptionKeyVaultProperties", + rotation_to_latest_key_version_enabled: Optional[bool] = None, **kwargs ): - super(GroupIdInformationProperties, self).__init__(**kwargs) - self.group_id = group_id - self.required_members = required_members - self.required_zone_names = required_zone_names + """ + :keyword key_source: The encryption keySource (provider). Possible values (case-insensitive): + Microsoft.Keyvault. Required. "Microsoft.Keyvault" + :paramtype key_source: str or ~azure.mgmt.databricks.models.EncryptionKeySource + :keyword key_vault_properties: Key Vault input properties for encryption. Required. + :paramtype key_vault_properties: + ~azure.mgmt.databricks.models.ManagedDiskEncryptionKeyVaultProperties + :keyword rotation_to_latest_key_version_enabled: Indicate whether the latest key version should + be automatically used for Managed Disk Encryption. + :paramtype rotation_to_latest_key_version_enabled: bool + """ + super().__init__(**kwargs) + self.key_source = key_source + self.key_vault_properties = key_vault_properties + self.rotation_to_latest_key_version_enabled = rotation_to_latest_key_version_enabled + +class ManagedDiskEncryptionKeyVaultProperties(_serialization.Model): + """Key Vault input properties for encryption. + + All required parameters must be populated in order to send to Azure. -class ManagedIdentityConfiguration(msrest.serialization.Model): + :ivar key_vault_uri: The URI of KeyVault. Required. + :vartype key_vault_uri: str + :ivar key_name: The name of KeyVault key. Required. + :vartype key_name: str + :ivar key_version: The version of KeyVault key. 
Required. + :vartype key_version: str + """ + + _validation = { + "key_vault_uri": {"required": True}, + "key_name": {"required": True}, + "key_version": {"required": True}, + } + + _attribute_map = { + "key_vault_uri": {"key": "keyVaultUri", "type": "str"}, + "key_name": {"key": "keyName", "type": "str"}, + "key_version": {"key": "keyVersion", "type": "str"}, + } + + def __init__(self, *, key_vault_uri: str, key_name: str, key_version: str, **kwargs): + """ + :keyword key_vault_uri: The URI of KeyVault. Required. + :paramtype key_vault_uri: str + :keyword key_name: The name of KeyVault key. Required. + :paramtype key_name: str + :keyword key_version: The version of KeyVault key. Required. + :paramtype key_version: str + """ + super().__init__(**kwargs) + self.key_vault_uri = key_vault_uri + self.key_name = key_name + self.key_version = key_version + + +class ManagedIdentityConfiguration(_serialization.Model): """The Managed Identity details for storage account. Variables are only populated by the server, and will be ignored when sending a request. @@ -439,68 +896,66 @@ class ManagedIdentityConfiguration(msrest.serialization.Model): """ _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'readonly': True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"readonly": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(ManagedIdentityConfiguration, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.principal_id = None self.tenant_id = None self.type = None -class Operation(msrest.serialization.Model): +class Operation(_serialization.Model): """REST API operation. - :param name: Operation name: {provider}/{resource}/{operation}. - :type name: str - :param display: The object that represents the operation. - :type display: ~azure_databricks_management_client.models.OperationDisplay + :ivar name: Operation name: {provider}/{resource}/{operation}. + :vartype name: str + :ivar display: The object that represents the operation. + :vartype display: ~azure.mgmt.databricks.models.OperationDisplay """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display': {'key': 'display', 'type': 'OperationDisplay'}, + "name": {"key": "name", "type": "str"}, + "display": {"key": "display", "type": "OperationDisplay"}, } - def __init__( - self, - *, - name: Optional[str] = None, - display: Optional["OperationDisplay"] = None, - **kwargs - ): - super(Operation, self).__init__(**kwargs) + def __init__(self, *, name: Optional[str] = None, display: Optional["_models.OperationDisplay"] = None, **kwargs): + """ + :keyword name: Operation name: {provider}/{resource}/{operation}. + :paramtype name: str + :keyword display: The object that represents the operation. + :paramtype display: ~azure.mgmt.databricks.models.OperationDisplay + """ + super().__init__(**kwargs) self.name = name self.display = display -class OperationDisplay(msrest.serialization.Model): +class OperationDisplay(_serialization.Model): """The object that represents the operation. - :param provider: Service provider: Microsoft.ResourceProvider. 
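The two managed-disk encryption models introduced above compose as follows; a minimal sketch in which the vault URI, key name, and key version are placeholders:

    from azure.mgmt.databricks import models as _models

    key_vault = _models.ManagedDiskEncryptionKeyVaultProperties(
        key_vault_uri="https://myvault.vault.azure.net",  # placeholder Key Vault
        key_name="my-cmk-key",
        key_version="<key-version>",  # placeholder; a specific key version is required
    )
    encryption = _models.ManagedDiskEncryption(
        key_source="Microsoft.Keyvault",  # the only known EncryptionKeySource value
        key_vault_properties=key_vault,
        rotation_to_latest_key_version_enabled=True,
    )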
- :type provider: str - :param resource: Resource on which the operation is performed. - :type resource: str - :param operation: Operation type: Read, write, delete, etc. - :type operation: str + :ivar provider: Service provider: Microsoft.ResourceProvider. + :vartype provider: str + :ivar resource: Resource on which the operation is performed. + :vartype resource: str + :ivar operation: Operation type: Read, write, delete, etc. + :vartype operation: str """ _attribute_map = { - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, + "provider": {"key": "provider", "type": "str"}, + "resource": {"key": "resource", "type": "str"}, + "operation": {"key": "operation", "type": "str"}, } def __init__( @@ -511,40 +966,83 @@ def __init__( operation: Optional[str] = None, **kwargs ): - super(OperationDisplay, self).__init__(**kwargs) + """ + :keyword provider: Service provider: Microsoft.ResourceProvider. + :paramtype provider: str + :keyword resource: Resource on which the operation is performed. + :paramtype resource: str + :keyword operation: Operation type: Read, write, delete, etc. + :paramtype operation: str + """ + super().__init__(**kwargs) self.provider = provider self.resource = resource self.operation = operation -class OperationListResult(msrest.serialization.Model): +class OperationListResult(_serialization.Model): """Result of the request to list Resource Provider operations. It contains a list of operations and a URL link to get the next set of results. - :param value: List of Resource Provider operations supported by the Resource Provider resource + :ivar value: List of Resource Provider operations supported by the Resource Provider resource provider. - :type value: list[~azure_databricks_management_client.models.Operation] - :param next_link: URL to get the next set of operation list results if there are any. - :type next_link: str + :vartype value: list[~azure.mgmt.databricks.models.Operation] + :ivar next_link: URL to get the next set of operation list results if there are any. + :vartype next_link: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[Operation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[Operation]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, *, value: Optional[List["_models.Operation"]] = None, next_link: Optional[str] = None, **kwargs): + """ + :keyword value: List of Resource Provider operations supported by the Resource Provider + resource provider. + :paramtype value: list[~azure.mgmt.databricks.models.Operation] + :keyword next_link: URL to get the next set of operation list results if there are any. + :paramtype next_link: str + """ + super().__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class OutboundEnvironmentEndpoint(_serialization.Model): + """Egress endpoints which Workspace connects to for common purposes. + + :ivar category: The category of endpoints accessed by the Workspace, e.g. azure-storage, + azure-mysql, etc. + :vartype category: str + :ivar endpoints: The endpoints that Workspace connect to. 
+ :vartype endpoints: list[~azure.mgmt.databricks.models.EndpointDependency] + """ + + _attribute_map = { + "category": {"key": "category", "type": "str"}, + "endpoints": {"key": "endpoints", "type": "[EndpointDependency]"}, } def __init__( self, *, - value: Optional[List["Operation"]] = None, - next_link: Optional[str] = None, + category: Optional[str] = None, + endpoints: Optional[List["_models.EndpointDependency"]] = None, **kwargs ): - super(OperationListResult, self).__init__(**kwargs) - self.value = value - self.next_link = next_link - - -class PrivateEndpoint(msrest.serialization.Model): + """ + :keyword category: The category of endpoints accessed by the Workspace, e.g. azure-storage, + azure-mysql, etc. + :paramtype category: str + :keyword endpoints: The endpoints that Workspace connect to. + :paramtype endpoints: list[~azure.mgmt.databricks.models.EndpointDependency] + """ + super().__init__(**kwargs) + self.category = category + self.endpoints = endpoints + + +class PrivateEndpoint(_serialization.Model): """The private endpoint property of a private endpoint connection. Variables are only populated by the server, and will be ignored when sending a request. @@ -554,22 +1052,20 @@ class PrivateEndpoint(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, + "id": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - **kwargs - ): - super(PrivateEndpoint, self).__init__(**kwargs) + def __init__(self, **kwargs): + """ """ + super().__init__(**kwargs) self.id = None -class PrivateEndpointConnection(msrest.serialization.Model): +class PrivateEndpointConnection(_serialization.Model): """The private endpoint connection of a workspace. Variables are only populated by the server, and will be ignored when sending a request. @@ -582,243 +1078,283 @@ class PrivateEndpointConnection(msrest.serialization.Model): :vartype name: str :ivar type: The resource type. :vartype type: str - :param properties: Required. The private endpoint connection properties. - :type properties: - ~azure_databricks_management_client.models.PrivateEndpointConnectionProperties + :ivar properties: The private endpoint connection properties. Required. + :vartype properties: ~azure.mgmt.databricks.models.PrivateEndpointConnectionProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'PrivateEndpointConnectionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "properties": {"key": "properties", "type": "PrivateEndpointConnectionProperties"}, } - def __init__( - self, - *, - properties: "PrivateEndpointConnectionProperties", - **kwargs - ): - super(PrivateEndpointConnection, self).__init__(**kwargs) + def __init__(self, *, properties: "_models.PrivateEndpointConnectionProperties", **kwargs): + """ + :keyword properties: The private endpoint connection properties. Required. 
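OutboundEnvironmentEndpoint above is the item type returned by the new outbound-network-dependencies operation group this patch adds; a hedged sketch, assuming the generated operation is named outbound_network_dependencies_endpoints.list and using placeholder resource names:

    from azure.identity import DefaultAzureCredential
    from azure.mgmt.databricks import AzureDatabricksManagementClient

    client = AzureDatabricksManagementClient(DefaultAzureCredential(), subscription_id="<subscription-id>")
    # Each returned item groups egress endpoints by category (e.g. azure-storage).
    for item in client.outbound_network_dependencies_endpoints.list(
        resource_group_name="myResourceGroup", workspace_name="myWorkspace"
    ):
        print(item.category)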
+ :paramtype properties: ~azure.mgmt.databricks.models.PrivateEndpointConnectionProperties + """ + super().__init__(**kwargs) self.id = None self.name = None self.type = None self.properties = properties -class PrivateEndpointConnectionProperties(msrest.serialization.Model): +class PrivateEndpointConnectionProperties(_serialization.Model): """The properties of a private endpoint connection. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param private_endpoint: Private endpoint. - :type private_endpoint: ~azure_databricks_management_client.models.PrivateEndpoint - :param private_link_service_connection_state: Required. Private endpoint connection state. - :type private_link_service_connection_state: - ~azure_databricks_management_client.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: Provisioning state of the private endpoint connection. Possible - values include: "Succeeded", "Creating", "Updating", "Deleting", "Failed". + :ivar private_endpoint: Private endpoint. + :vartype private_endpoint: ~azure.mgmt.databricks.models.PrivateEndpoint + :ivar private_link_service_connection_state: Private endpoint connection state. Required. + :vartype private_link_service_connection_state: + ~azure.mgmt.databricks.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: Provisioning state of the private endpoint connection. Known values + are: "Succeeded", "Creating", "Updating", "Deleting", and "Failed". :vartype provisioning_state: str or - ~azure_databricks_management_client.models.PrivateEndpointConnectionProvisioningState + ~azure.mgmt.databricks.models.PrivateEndpointConnectionProvisioningState """ _validation = { - 'private_link_service_connection_state': {'required': True}, - 'provisioning_state': {'readonly': True}, + "private_link_service_connection_state": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'private_endpoint': {'key': 'privateEndpoint', 'type': 'PrivateEndpoint'}, - 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "private_endpoint": {"key": "privateEndpoint", "type": "PrivateEndpoint"}, + "private_link_service_connection_state": { + "key": "privateLinkServiceConnectionState", + "type": "PrivateLinkServiceConnectionState", + }, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( self, *, - private_link_service_connection_state: "PrivateLinkServiceConnectionState", - private_endpoint: Optional["PrivateEndpoint"] = None, + private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState", + private_endpoint: Optional["_models.PrivateEndpoint"] = None, **kwargs ): - super(PrivateEndpointConnectionProperties, self).__init__(**kwargs) + """ + :keyword private_endpoint: Private endpoint. + :paramtype private_endpoint: ~azure.mgmt.databricks.models.PrivateEndpoint + :keyword private_link_service_connection_state: Private endpoint connection state. Required. 
+ :paramtype private_link_service_connection_state: + ~azure.mgmt.databricks.models.PrivateLinkServiceConnectionState + """ + super().__init__(**kwargs) self.private_endpoint = private_endpoint self.private_link_service_connection_state = private_link_service_connection_state self.provisioning_state = None -class PrivateEndpointConnectionsList(msrest.serialization.Model): +class PrivateEndpointConnectionsList(_serialization.Model): """List of private link connections. - :param value: The list of returned private endpoint connection. - :type value: list[~azure_databricks_management_client.models.PrivateEndpointConnection] - :param next_link: The URL to get the next set of endpoint connections. - :type next_link: str + :ivar value: The list of returned private endpoint connection. + :vartype value: list[~azure.mgmt.databricks.models.PrivateEndpointConnection] + :ivar next_link: The URL to get the next set of endpoint connections. + :vartype next_link: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[PrivateEndpointConnection]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, - value: Optional[List["PrivateEndpointConnection"]] = None, + value: Optional[List["_models.PrivateEndpointConnection"]] = None, next_link: Optional[str] = None, **kwargs ): - super(PrivateEndpointConnectionsList, self).__init__(**kwargs) + """ + :keyword value: The list of returned private endpoint connection. + :paramtype value: list[~azure.mgmt.databricks.models.PrivateEndpointConnection] + :keyword next_link: The URL to get the next set of endpoint connections. + :paramtype next_link: str + """ + super().__init__(**kwargs) self.value = value self.next_link = next_link -class PrivateLinkResourcesList(msrest.serialization.Model): +class PrivateLinkResourcesList(_serialization.Model): """The available private link resources for a workspace. - :param value: The list of available private link resources for a workspace. - :type value: list[~azure_databricks_management_client.models.GroupIdInformation] - :param next_link: The URL to get the next set of private link resources. - :type next_link: str + :ivar value: The list of available private link resources for a workspace. + :vartype value: list[~azure.mgmt.databricks.models.GroupIdInformation] + :ivar next_link: The URL to get the next set of private link resources. + :vartype next_link: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[GroupIdInformation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[GroupIdInformation]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - value: Optional[List["GroupIdInformation"]] = None, - next_link: Optional[str] = None, - **kwargs + self, *, value: Optional[List["_models.GroupIdInformation"]] = None, next_link: Optional[str] = None, **kwargs ): - super(PrivateLinkResourcesList, self).__init__(**kwargs) + """ + :keyword value: The list of available private link resources for a workspace. + :paramtype value: list[~azure.mgmt.databricks.models.GroupIdInformation] + :keyword next_link: The URL to get the next set of private link resources. 
+ :paramtype next_link: str + """ + super().__init__(**kwargs) self.value = value self.next_link = next_link -class PrivateLinkServiceConnectionState(msrest.serialization.Model): +class PrivateLinkServiceConnectionState(_serialization.Model): """The current state of a private endpoint connection. All required parameters must be populated in order to send to Azure. - :param status: Required. The status of a private endpoint connection. Possible values include: - "Pending", "Approved", "Rejected", "Disconnected". - :type status: str or - ~azure_databricks_management_client.models.PrivateLinkServiceConnectionStatus - :param description: The description for the current state of a private endpoint connection. - :type description: str - :param action_required: Actions required for a private endpoint connection. - :type action_required: str + :ivar status: The status of a private endpoint connection. Required. Known values are: + "Pending", "Approved", "Rejected", and "Disconnected". + :vartype status: str or ~azure.mgmt.databricks.models.PrivateLinkServiceConnectionStatus + :ivar description: The description for the current state of a private endpoint connection. + :vartype description: str + :ivar action_required: Actions required for a private endpoint connection. + :vartype action_required: str """ _validation = { - 'status': {'required': True}, + "status": {"required": True}, } _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'action_required': {'key': 'actionRequired', 'type': 'str'}, + "status": {"key": "status", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "action_required": {"key": "actionRequired", "type": "str"}, } def __init__( self, *, - status: Union[str, "PrivateLinkServiceConnectionStatus"], + status: Union[str, "_models.PrivateLinkServiceConnectionStatus"], description: Optional[str] = None, action_required: Optional[str] = None, **kwargs ): - super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + """ + :keyword status: The status of a private endpoint connection. Required. Known values are: + "Pending", "Approved", "Rejected", and "Disconnected". + :paramtype status: str or ~azure.mgmt.databricks.models.PrivateLinkServiceConnectionStatus + :keyword description: The description for the current state of a private endpoint connection. + :paramtype description: str + :keyword action_required: Actions required for a private endpoint connection. + :paramtype action_required: str + """ + super().__init__(**kwargs) self.status = status self.description = description self.action_required = action_required -class Sku(msrest.serialization.Model): +class Sku(_serialization.Model): """SKU for the resource. All required parameters must be populated in order to send to Azure. - :param name: Required. The SKU name. - :type name: str - :param tier: The SKU tier. - :type tier: str + :ivar name: The SKU name. Required. + :vartype name: str + :ivar tier: The SKU tier. 
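Taken together, the private-endpoint models above nest like this; a minimal sketch where the status string is one of the PrivateLinkServiceConnectionStatus known values and the description is a placeholder:

    from azure.mgmt.databricks import models as _models

    state = _models.PrivateLinkServiceConnectionState(
        status="Approved",
        description="Approved by the workspace admin.",
    )
    # provisioning_state stays server-populated; only the connection state is settable.
    props = _models.PrivateEndpointConnectionProperties(private_link_service_connection_state=state)
    connection = _models.PrivateEndpointConnection(properties=props)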
+ :vartype tier: str """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, } - def __init__( - self, - *, - name: str, - tier: Optional[str] = None, - **kwargs - ): - super(Sku, self).__init__(**kwargs) + def __init__(self, *, name: str, tier: Optional[str] = None, **kwargs): + """ + :keyword name: The SKU name. Required. + :paramtype name: str + :keyword tier: The SKU tier. + :paramtype tier: str + """ + super().__init__(**kwargs) self.name = name self.tier = tier -class SystemData(msrest.serialization.Model): +class SystemData(_serialization.Model): """Metadata pertaining to creation and last modification of the resource. - :param created_by: The identity that created the resource. - :type created_by: str - :param created_by_type: The type of identity that created the resource. Possible values - include: "User", "Application", "ManagedIdentity", "Key". - :type created_by_type: str or ~azure_databricks_management_client.models.CreatedByType - :param created_at: The timestamp of resource creation (UTC). - :type created_at: ~datetime.datetime - :param last_modified_by: The identity that last modified the resource. - :type last_modified_by: str - :param last_modified_by_type: The type of identity that last modified the resource. Possible - values include: "User", "Application", "ManagedIdentity", "Key". - :type last_modified_by_type: str or ~azure_databricks_management_client.models.CreatedByType - :param last_modified_at: The timestamp of resource last modification (UTC). - :type last_modified_at: ~datetime.datetime + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.mgmt.databricks.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.databricks.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
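The reworked Sku constructor collapses to a single call in practice; the name and tier below are illustrative values:

    from azure.mgmt.databricks import models as _models

    sku = _models.Sku(name="premium", tier="Premium")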
+ :vartype last_modified_at: ~datetime.datetime """ _attribute_map = { - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'created_by_type': {'key': 'createdByType', 'type': 'str'}, - 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, - 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, - 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, - 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + "created_by": {"key": "createdBy", "type": "str"}, + "created_by_type": {"key": "createdByType", "type": "str"}, + "created_at": {"key": "createdAt", "type": "iso-8601"}, + "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, + "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, + "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, } def __init__( self, *, created_by: Optional[str] = None, - created_by_type: Optional[Union[str, "CreatedByType"]] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, created_at: Optional[datetime.datetime] = None, last_modified_by: Optional[str] = None, - last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, last_modified_at: Optional[datetime.datetime] = None, **kwargs ): - super(SystemData, self).__init__(**kwargs) + """ + :keyword created_by: The identity that created the resource. + :paramtype created_by: str + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :paramtype created_by_type: str or ~azure.mgmt.databricks.models.CreatedByType + :keyword created_at: The timestamp of resource creation (UTC). + :paramtype created_at: ~datetime.datetime + :keyword last_modified_by: The identity that last modified the resource. + :paramtype last_modified_by: str + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", and "Key". + :paramtype last_modified_by_type: str or ~azure.mgmt.databricks.models.CreatedByType + :keyword last_modified_at: The timestamp of resource last modification (UTC). + :paramtype last_modified_at: ~datetime.datetime + """ + super().__init__(**kwargs) self.created_by = created_by self.created_by_type = created_by_type self.created_at = created_at @@ -827,55 +1363,7 @@ def __init__( self.last_modified_at = last_modified_at -class TrackedResource(Resource): - """The resource model definition for a ARM tracked top level resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource Id for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or - Microsoft.Storage/storageAccounts. - :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: Required. The geo-location where the resource lives. 
- :type location: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - } - - def __init__( - self, - *, - location: str, - tags: Optional[Dict[str, str]] = None, - **kwargs - ): - super(TrackedResource, self).__init__(**kwargs) - self.tags = tags - self.location = location - - -class VirtualNetworkPeering(msrest.serialization.Model): +class VirtualNetworkPeering(_serialization.Model): # pylint: disable=too-many-instance-attributes """Peerings in a VirtualNetwork resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -888,82 +1376,122 @@ class VirtualNetworkPeering(msrest.serialization.Model): :vartype id: str :ivar type: type of the virtual network peering resource. :vartype type: str - :param allow_virtual_network_access: Whether the VMs in the local virtual network space would - be able to access the VMs in remote virtual network space. - :type allow_virtual_network_access: bool - :param allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the local virtual + :ivar allow_virtual_network_access: Whether the VMs in the local virtual network space would be + able to access the VMs in remote virtual network space. + :vartype allow_virtual_network_access: bool + :ivar allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed in remote virtual network. - :type allow_forwarded_traffic: bool - :param allow_gateway_transit: If gateway links can be used in remote virtual networking to link + :vartype allow_forwarded_traffic: bool + :ivar allow_gateway_transit: If gateway links can be used in remote virtual networking to link to this virtual network. - :type allow_gateway_transit: bool - :param use_remote_gateways: If remote gateways can be used on this virtual network. If the flag + :vartype allow_gateway_transit: bool + :ivar use_remote_gateways: If remote gateways can be used on this virtual network. If the flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network will use gateways of remote virtual network for transit. Only one peering can have this flag set to true. This flag cannot be set if virtual network already has a gateway. - :type use_remote_gateways: bool - :param databricks_virtual_network: The remote virtual network should be in the same region. See + :vartype use_remote_gateways: bool + :ivar databricks_virtual_network: The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). - :type databricks_virtual_network: - ~azure_databricks_management_client.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork - :param databricks_address_space: The reference to the databricks virtual network address space. - :type databricks_address_space: ~azure_databricks_management_client.models.AddressSpace - :param remote_virtual_network: Required. The remote virtual network should be in the same - region. 
See here to learn more + :vartype databricks_virtual_network: + ~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork + :ivar databricks_address_space: The reference to the databricks virtual network address space. + :vartype databricks_address_space: ~azure.mgmt.databricks.models.AddressSpace + :ivar remote_virtual_network: The remote virtual network should be in the same region. See here + to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). - :type remote_virtual_network: - ~azure_databricks_management_client.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork - :param remote_address_space: The reference to the remote virtual network address space. - :type remote_address_space: ~azure_databricks_management_client.models.AddressSpace - :ivar peering_state: The status of the virtual network peering. Possible values include: - "Initiated", "Connected", "Disconnected". - :vartype peering_state: str or ~azure_databricks_management_client.models.PeeringState - :ivar provisioning_state: The provisioning state of the virtual network peering resource. - Possible values include: "Succeeded", "Updating", "Deleting", "Failed". - :vartype provisioning_state: str or - ~azure_databricks_management_client.models.PeeringProvisioningState + Required. + :vartype remote_virtual_network: + ~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork + :ivar remote_address_space: The reference to the remote virtual network address space. + :vartype remote_address_space: ~azure.mgmt.databricks.models.AddressSpace + :ivar peering_state: The status of the virtual network peering. Known values are: "Initiated", + "Connected", and "Disconnected". + :vartype peering_state: str or ~azure.mgmt.databricks.models.PeeringState + :ivar provisioning_state: The provisioning state of the virtual network peering resource. Known + values are: "Succeeded", "Updating", "Deleting", and "Failed". 
+ :vartype provisioning_state: str or ~azure.mgmt.databricks.models.PeeringProvisioningState """ _validation = { - 'name': {'readonly': True}, - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'remote_virtual_network': {'required': True}, - 'peering_state': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "name": {"readonly": True}, + "id": {"readonly": True}, + "type": {"readonly": True}, + "remote_virtual_network": {"required": True}, + "peering_state": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'}, - 'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'}, - 'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'}, - 'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'}, - 'databricks_virtual_network': {'key': 'properties.databricksVirtualNetwork', 'type': 'VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork'}, - 'databricks_address_space': {'key': 'properties.databricksAddressSpace', 'type': 'AddressSpace'}, - 'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork'}, - 'remote_address_space': {'key': 'properties.remoteAddressSpace', 'type': 'AddressSpace'}, - 'peering_state': {'key': 'properties.peeringState', 'type': 'str'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "id": {"key": "id", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "allow_virtual_network_access": {"key": "properties.allowVirtualNetworkAccess", "type": "bool"}, + "allow_forwarded_traffic": {"key": "properties.allowForwardedTraffic", "type": "bool"}, + "allow_gateway_transit": {"key": "properties.allowGatewayTransit", "type": "bool"}, + "use_remote_gateways": {"key": "properties.useRemoteGateways", "type": "bool"}, + "databricks_virtual_network": { + "key": "properties.databricksVirtualNetwork", + "type": "VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork", + }, + "databricks_address_space": {"key": "properties.databricksAddressSpace", "type": "AddressSpace"}, + "remote_virtual_network": { + "key": "properties.remoteVirtualNetwork", + "type": "VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork", + }, + "remote_address_space": {"key": "properties.remoteAddressSpace", "type": "AddressSpace"}, + "peering_state": {"key": "properties.peeringState", "type": "str"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, - remote_virtual_network: "VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork", + remote_virtual_network: "_models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork", allow_virtual_network_access: Optional[bool] = None, allow_forwarded_traffic: Optional[bool] = None, allow_gateway_transit: Optional[bool] = None, use_remote_gateways: Optional[bool] = None, - databricks_virtual_network: Optional["VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork"] = None, - databricks_address_space: Optional["AddressSpace"] = None, - remote_address_space: Optional["AddressSpace"] = None, + databricks_virtual_network: Optional[ + 
"_models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork" + ] = None, + databricks_address_space: Optional["_models.AddressSpace"] = None, + remote_address_space: Optional["_models.AddressSpace"] = None, **kwargs ): - super(VirtualNetworkPeering, self).__init__(**kwargs) + """ + :keyword allow_virtual_network_access: Whether the VMs in the local virtual network space would + be able to access the VMs in remote virtual network space. + :paramtype allow_virtual_network_access: bool + :keyword allow_forwarded_traffic: Whether the forwarded traffic from the VMs in the local + virtual network will be allowed/disallowed in remote virtual network. + :paramtype allow_forwarded_traffic: bool + :keyword allow_gateway_transit: If gateway links can be used in remote virtual networking to + link to this virtual network. + :paramtype allow_gateway_transit: bool + :keyword use_remote_gateways: If remote gateways can be used on this virtual network. If the + flag is set to true, and allowGatewayTransit on remote peering is also true, virtual network + will use gateways of remote virtual network for transit. Only one peering can have this flag + set to true. This flag cannot be set if virtual network already has a gateway. + :paramtype use_remote_gateways: bool + :keyword databricks_virtual_network: The remote virtual network should be in the same region. + See here to learn more + (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). + :paramtype databricks_virtual_network: + ~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork + :keyword databricks_address_space: The reference to the databricks virtual network address + space. + :paramtype databricks_address_space: ~azure.mgmt.databricks.models.AddressSpace + :keyword remote_virtual_network: The remote virtual network should be in the same region. See + here to learn more + (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). + Required. + :paramtype remote_virtual_network: + ~azure.mgmt.databricks.models.VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork + :keyword remote_address_space: The reference to the remote virtual network address space. + :paramtype remote_address_space: ~azure.mgmt.databricks.models.AddressSpace + """ + super().__init__(**kwargs) self.name = None self.id = None self.type = None @@ -979,76 +1507,81 @@ def __init__( self.provisioning_state = None -class VirtualNetworkPeeringList(msrest.serialization.Model): +class VirtualNetworkPeeringList(_serialization.Model): """Gets all virtual network peerings under a workspace. - :param value: List of virtual network peerings on workspace. - :type value: list[~azure_databricks_management_client.models.VirtualNetworkPeering] - :param next_link: URL to get the next set of virtual network peering list results if there are + :ivar value: List of virtual network peerings on workspace. + :vartype value: list[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :ivar next_link: URL to get the next set of virtual network peering list results if there are any. 
- :type next_link: str + :vartype next_link: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[VirtualNetworkPeering]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[VirtualNetworkPeering]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( self, *, - value: Optional[List["VirtualNetworkPeering"]] = None, + value: Optional[List["_models.VirtualNetworkPeering"]] = None, next_link: Optional[str] = None, **kwargs ): - super(VirtualNetworkPeeringList, self).__init__(**kwargs) + """ + :keyword value: List of virtual network peerings on workspace. + :paramtype value: list[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :keyword next_link: URL to get the next set of virtual network peering list results if there + are any. + :paramtype next_link: str + """ + super().__init__(**kwargs) self.value = value self.next_link = next_link -class VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork(msrest.serialization.Model): +class VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork(_serialization.Model): """The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). - :param id: The Id of the databricks virtual network. - :type id: str + :ivar id: The Id of the databricks virtual network. + :vartype id: str """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - *, - id: Optional[str] = None, - **kwargs - ): - super(VirtualNetworkPeeringPropertiesFormatDatabricksVirtualNetwork, self).__init__(**kwargs) + def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin + """ + :keyword id: The Id of the databricks virtual network. + :paramtype id: str + """ + super().__init__(**kwargs) self.id = id -class VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork(msrest.serialization.Model): +class VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork(_serialization.Model): """The remote virtual network should be in the same region. See here to learn more (https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/vnet-peering). - :param id: The Id of the remote virtual network. - :type id: str + :ivar id: The Id of the remote virtual network. + :vartype id: str """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - *, - id: Optional[str] = None, - **kwargs - ): - super(VirtualNetworkPeeringPropertiesFormatRemoteVirtualNetwork, self).__init__(**kwargs) + def __init__(self, *, id: Optional[str] = None, **kwargs): # pylint: disable=redefined-builtin + """ + :keyword id: The Id of the remote virtual network. + :paramtype id: str + """ + super().__init__(**kwargs) self.id = id -class Workspace(TrackedResource): +class Workspace(TrackedResource): # pylint: disable=too-many-instance-attributes """Information about workspace. Variables are only populated by the server, and will be ignored when sending a request. @@ -1063,34 +1596,32 @@ class Workspace(TrackedResource): :ivar type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts. :vartype type: str - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] - :param location: Required. The geo-location where the resource lives. 
- :type location: str - :param sku: The SKU of the resource. - :type sku: ~azure_databricks_management_client.models.Sku + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar sku: The SKU of the resource. + :vartype sku: ~azure.mgmt.databricks.models.Sku :ivar system_data: The system metadata relating to this resource. - :vartype system_data: ~azure_databricks_management_client.models.SystemData - :param managed_resource_group_id: Required. The managed resource group Id. - :type managed_resource_group_id: str - :param parameters: The workspace's custom parameters. - :type parameters: ~azure_databricks_management_client.models.WorkspaceCustomParameters - :ivar provisioning_state: The workspace provisioning state. Possible values include: - "Accepted", "Running", "Ready", "Creating", "Created", "Deleting", "Deleted", "Canceled", - "Failed", "Succeeded", "Updating". - :vartype provisioning_state: str or - ~azure_databricks_management_client.models.ProvisioningState - :param ui_definition_uri: The blob URI where the UI definition file is located. - :type ui_definition_uri: str - :param authorizations: The workspace provider authorizations. - :type authorizations: - list[~azure_databricks_management_client.models.WorkspaceProviderAuthorization] - :param created_by: Indicates the Object ID, PUID and Application ID of entity that created the + :vartype system_data: ~azure.mgmt.databricks.models.SystemData + :ivar managed_resource_group_id: The managed resource group Id. Required. + :vartype managed_resource_group_id: str + :ivar parameters: The workspace's custom parameters. + :vartype parameters: ~azure.mgmt.databricks.models.WorkspaceCustomParameters + :ivar provisioning_state: The workspace provisioning state. Known values are: "Accepted", + "Running", "Ready", "Creating", "Created", "Deleting", "Deleted", "Canceled", "Failed", + "Succeeded", and "Updating". + :vartype provisioning_state: str or ~azure.mgmt.databricks.models.ProvisioningState + :ivar ui_definition_uri: The blob URI where the UI definition file is located. + :vartype ui_definition_uri: str + :ivar authorizations: The workspace provider authorizations. + :vartype authorizations: list[~azure.mgmt.databricks.models.WorkspaceProviderAuthorization] + :ivar created_by: Indicates the Object ID, PUID and Application ID of entity that created the workspace. - :type created_by: ~azure_databricks_management_client.models.CreatedBy - :param updated_by: Indicates the Object ID, PUID and Application ID of entity that last updated + :vartype created_by: ~azure.mgmt.databricks.models.CreatedBy + :ivar updated_by: Indicates the Object ID, PUID and Application ID of entity that last updated the workspace. - :type updated_by: ~azure_databricks_management_client.models.CreatedBy + :vartype updated_by: ~azure.mgmt.databricks.models.CreatedBy :ivar created_date_time: Specifies the date and time when the workspace is created. :vartype created_date_time: ~datetime.datetime :ivar workspace_id: The unique identifier of the databricks workspace in databricks control @@ -1099,84 +1630,135 @@ class Workspace(TrackedResource): :ivar workspace_url: The workspace URL which is of the format 'adb-{workspaceId}.{random}.azuredatabricks.net'. :vartype workspace_url: str - :param storage_account_identity: The details of Managed Identity of Storage Account. 
- :type storage_account_identity: - ~azure_databricks_management_client.models.ManagedIdentityConfiguration - :param encryption: Encryption properties for databricks workspace. - :type encryption: ~azure_databricks_management_client.models.WorkspacePropertiesEncryption + :ivar storage_account_identity: The details of Managed Identity of Storage Account. + :vartype storage_account_identity: ~azure.mgmt.databricks.models.ManagedIdentityConfiguration + :ivar managed_disk_identity: The details of Managed Identity of Disk Encryption Set used for + Managed Disk Encryption. + :vartype managed_disk_identity: ~azure.mgmt.databricks.models.ManagedIdentityConfiguration + :ivar disk_encryption_set_id: The resource Id of the managed disk encryption set. + :vartype disk_encryption_set_id: str + :ivar encryption: Encryption properties for databricks workspace. + :vartype encryption: ~azure.mgmt.databricks.models.WorkspacePropertiesEncryption :ivar private_endpoint_connections: Private endpoint connections created on the workspace. :vartype private_endpoint_connections: - list[~azure_databricks_management_client.models.PrivateEndpointConnection] - :param public_network_access: The network access type for accessing workspace. Set value to - disabled to access workspace only via private link. Possible values include: "Enabled", - "Disabled". - :type public_network_access: str or - ~azure_databricks_management_client.models.PublicNetworkAccess - :param required_nsg_rules: Gets or sets a value indicating whether data plane (clusters) to + list[~azure.mgmt.databricks.models.PrivateEndpointConnection] + :ivar public_network_access: The network access type for accessing workspace. Set value to + disabled to access workspace only via private link. Known values are: "Enabled" and "Disabled". + :vartype public_network_access: str or ~azure.mgmt.databricks.models.PublicNetworkAccess + :ivar required_nsg_rules: Gets or sets a value indicating whether data plane (clusters) to control plane communication happen over private endpoint. Supported values are 'AllRules' and - 'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. Possible values - include: "AllRules", "NoAzureDatabricksRules", "NoAzureServiceRules". - :type required_nsg_rules: str or ~azure_databricks_management_client.models.RequiredNsgRules + 'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. Known values + are: "AllRules", "NoAzureDatabricksRules", and "NoAzureServiceRules". 
+ :vartype required_nsg_rules: str or ~azure.mgmt.databricks.models.RequiredNsgRules """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'location': {'required': True}, - 'system_data': {'readonly': True}, - 'managed_resource_group_id': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_date_time': {'readonly': True}, - 'workspace_id': {'readonly': True}, - 'workspace_url': {'readonly': True}, - 'private_endpoint_connections': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "location": {"required": True}, + "system_data": {"readonly": True}, + "managed_resource_group_id": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_date_time": {"readonly": True}, + "workspace_id": {"readonly": True}, + "workspace_url": {"readonly": True}, + "private_endpoint_connections": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'managed_resource_group_id': {'key': 'properties.managedResourceGroupId', 'type': 'str'}, - 'parameters': {'key': 'properties.parameters', 'type': 'WorkspaceCustomParameters'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'ui_definition_uri': {'key': 'properties.uiDefinitionUri', 'type': 'str'}, - 'authorizations': {'key': 'properties.authorizations', 'type': '[WorkspaceProviderAuthorization]'}, - 'created_by': {'key': 'properties.createdBy', 'type': 'CreatedBy'}, - 'updated_by': {'key': 'properties.updatedBy', 'type': 'CreatedBy'}, - 'created_date_time': {'key': 'properties.createdDateTime', 'type': 'iso-8601'}, - 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, - 'workspace_url': {'key': 'properties.workspaceUrl', 'type': 'str'}, - 'storage_account_identity': {'key': 'properties.storageAccountIdentity', 'type': 'ManagedIdentityConfiguration'}, - 'encryption': {'key': 'properties.encryption', 'type': 'WorkspacePropertiesEncryption'}, - 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'required_nsg_rules': {'key': 'properties.requiredNsgRules', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "managed_resource_group_id": {"key": "properties.managedResourceGroupId", "type": "str"}, + "parameters": {"key": "properties.parameters", "type": "WorkspaceCustomParameters"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "ui_definition_uri": {"key": "properties.uiDefinitionUri", "type": "str"}, + "authorizations": {"key": "properties.authorizations", "type": "[WorkspaceProviderAuthorization]"}, + "created_by": {"key": "properties.createdBy", "type": "CreatedBy"}, + "updated_by": {"key": "properties.updatedBy", "type": "CreatedBy"}, + "created_date_time": {"key": "properties.createdDateTime", 
"type": "iso-8601"}, + "workspace_id": {"key": "properties.workspaceId", "type": "str"}, + "workspace_url": {"key": "properties.workspaceUrl", "type": "str"}, + "storage_account_identity": { + "key": "properties.storageAccountIdentity", + "type": "ManagedIdentityConfiguration", + }, + "managed_disk_identity": {"key": "properties.managedDiskIdentity", "type": "ManagedIdentityConfiguration"}, + "disk_encryption_set_id": {"key": "properties.diskEncryptionSetId", "type": "str"}, + "encryption": {"key": "properties.encryption", "type": "WorkspacePropertiesEncryption"}, + "private_endpoint_connections": { + "key": "properties.privateEndpointConnections", + "type": "[PrivateEndpointConnection]", + }, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "required_nsg_rules": {"key": "properties.requiredNsgRules", "type": "str"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, location: str, managed_resource_group_id: str, tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, - parameters: Optional["WorkspaceCustomParameters"] = None, + sku: Optional["_models.Sku"] = None, + parameters: Optional["_models.WorkspaceCustomParameters"] = None, ui_definition_uri: Optional[str] = None, - authorizations: Optional[List["WorkspaceProviderAuthorization"]] = None, - created_by: Optional["CreatedBy"] = None, - updated_by: Optional["CreatedBy"] = None, - storage_account_identity: Optional["ManagedIdentityConfiguration"] = None, - encryption: Optional["WorkspacePropertiesEncryption"] = None, - public_network_access: Optional[Union[str, "PublicNetworkAccess"]] = None, - required_nsg_rules: Optional[Union[str, "RequiredNsgRules"]] = None, + authorizations: Optional[List["_models.WorkspaceProviderAuthorization"]] = None, + created_by: Optional["_models.CreatedBy"] = None, + updated_by: Optional["_models.CreatedBy"] = None, + storage_account_identity: Optional["_models.ManagedIdentityConfiguration"] = None, + managed_disk_identity: Optional["_models.ManagedIdentityConfiguration"] = None, + disk_encryption_set_id: Optional[str] = None, + encryption: Optional["_models.WorkspacePropertiesEncryption"] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + required_nsg_rules: Optional[Union[str, "_models.RequiredNsgRules"]] = None, **kwargs ): - super(Workspace, self).__init__(tags=tags, location=location, **kwargs) + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + :keyword location: The geo-location where the resource lives. Required. + :paramtype location: str + :keyword sku: The SKU of the resource. + :paramtype sku: ~azure.mgmt.databricks.models.Sku + :keyword managed_resource_group_id: The managed resource group Id. Required. + :paramtype managed_resource_group_id: str + :keyword parameters: The workspace's custom parameters. + :paramtype parameters: ~azure.mgmt.databricks.models.WorkspaceCustomParameters + :keyword ui_definition_uri: The blob URI where the UI definition file is located. + :paramtype ui_definition_uri: str + :keyword authorizations: The workspace provider authorizations. + :paramtype authorizations: list[~azure.mgmt.databricks.models.WorkspaceProviderAuthorization] + :keyword created_by: Indicates the Object ID, PUID and Application ID of entity that created + the workspace. + :paramtype created_by: ~azure.mgmt.databricks.models.CreatedBy + :keyword updated_by: Indicates the Object ID, PUID and Application ID of entity that last + updated the workspace. 
+ :paramtype updated_by: ~azure.mgmt.databricks.models.CreatedBy + :keyword storage_account_identity: The details of Managed Identity of Storage Account. + :paramtype storage_account_identity: ~azure.mgmt.databricks.models.ManagedIdentityConfiguration + :keyword managed_disk_identity: The details of Managed Identity of Disk Encryption Set used for + Managed Disk Encryption. + :paramtype managed_disk_identity: ~azure.mgmt.databricks.models.ManagedIdentityConfiguration + :keyword disk_encryption_set_id: The resource Id of the managed disk encryption set. + :paramtype disk_encryption_set_id: str + :keyword encryption: Encryption properties for databricks workspace. + :paramtype encryption: ~azure.mgmt.databricks.models.WorkspacePropertiesEncryption + :keyword public_network_access: The network access type for accessing workspace. Set value to + disabled to access workspace only via private link. Known values are: "Enabled" and "Disabled". + :paramtype public_network_access: str or ~azure.mgmt.databricks.models.PublicNetworkAccess + :keyword required_nsg_rules: Gets or sets a value indicating whether data plane (clusters) to + control plane communication happen over private endpoint. Supported values are 'AllRules' and + 'NoAzureDatabricksRules'. 'NoAzureServiceRules' value is for internal use only. Known values + are: "AllRules", "NoAzureDatabricksRules", and "NoAzureServiceRules". + :paramtype required_nsg_rules: str or ~azure.mgmt.databricks.models.RequiredNsgRules + """ + super().__init__(tags=tags, location=location, **kwargs) self.sku = sku self.system_data = None self.managed_resource_group_id = managed_resource_group_id @@ -1190,191 +1772,237 @@ def __init__( self.workspace_id = None self.workspace_url = None self.storage_account_identity = storage_account_identity + self.managed_disk_identity = managed_disk_identity + self.disk_encryption_set_id = disk_encryption_set_id self.encryption = encryption self.private_endpoint_connections = None self.public_network_access = public_network_access self.required_nsg_rules = required_nsg_rules -class WorkspaceCustomBooleanParameter(msrest.serialization.Model): +class WorkspaceCustomBooleanParameter(_serialization.Model): """The value which should be used for this field. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", + :ivar type: The type of variable that this is. Known values are: "Bool", "Object", and "String". - :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType - :param value: Required. The value which should be used for this field. - :type value: bool + :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :ivar value: The value which should be used for this field. Required. + :vartype value: bool """ _validation = { - 'type': {'readonly': True}, - 'value': {'required': True}, + "type": {"readonly": True}, + "value": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'bool'}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "bool"}, } - def __init__( - self, - *, - value: bool, - **kwargs - ): - super(WorkspaceCustomBooleanParameter, self).__init__(**kwargs) + def __init__(self, *, value: bool, **kwargs): + """ + :keyword value: The value which should be used for this field. Required. 
+ :paramtype value: bool + """ + super().__init__(**kwargs) self.type = None self.value = value -class WorkspaceCustomObjectParameter(msrest.serialization.Model): +class WorkspaceCustomObjectParameter(_serialization.Model): """The value which should be used for this field. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", + :ivar type: The type of variable that this is. Known values are: "Bool", "Object", and "String". - :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType - :param value: Required. The value which should be used for this field. - :type value: any + :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :ivar value: The value which should be used for this field. Required. + :vartype value: JSON """ _validation = { - 'type': {'readonly': True}, - 'value': {'required': True}, + "type": {"readonly": True}, + "value": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'object'}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "object"}, } - def __init__( - self, - *, - value: Any, - **kwargs - ): - super(WorkspaceCustomObjectParameter, self).__init__(**kwargs) + def __init__(self, *, value: JSON, **kwargs): + """ + :keyword value: The value which should be used for this field. Required. + :paramtype value: JSON + """ + super().__init__(**kwargs) self.type = None self.value = value -class WorkspaceCustomParameters(msrest.serialization.Model): +class WorkspaceCustomParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes """Custom Parameters used for Cluster Creation. Variables are only populated by the server, and will be ignored when sending a request. - :param aml_workspace_id: The ID of a Azure Machine Learning workspace to link with Databricks + :ivar aml_workspace_id: The ID of an Azure Machine Learning workspace to link with Databricks workspace. - :type aml_workspace_id: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param custom_virtual_network_id: The ID of a Virtual Network where this Databricks Cluster + :vartype aml_workspace_id: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar custom_virtual_network_id: The ID of a Virtual Network where this Databricks Cluster should be created. - :type custom_virtual_network_id: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param custom_public_subnet_name: The name of a Public Subnet within the Virtual Network. - :type custom_public_subnet_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param custom_private_subnet_name: The name of the Private Subnet within the Virtual Network. - :type custom_private_subnet_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param enable_no_public_ip: Should the Public IP be Disabled?. - :type enable_no_public_ip: - ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter - :param load_balancer_backend_pool_name: Name of the outbound Load Balancer Backend Pool for + :vartype custom_virtual_network_id: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar custom_public_subnet_name: The name of a Public Subnet within the Virtual Network.
+ :vartype custom_public_subnet_name: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar custom_private_subnet_name: The name of the Private Subnet within the Virtual Network. + :vartype custom_private_subnet_name: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar enable_no_public_ip: Should the Public IP be Disabled? + :vartype enable_no_public_ip: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :ivar load_balancer_backend_pool_name: Name of the outbound Load Balancer Backend Pool for Secure Cluster Connectivity (No Public IP). - :type load_balancer_backend_pool_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param load_balancer_id: Resource URI of Outbound Load balancer for Secure Cluster Connectivity + :vartype load_balancer_backend_pool_name: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar load_balancer_id: Resource URI of Outbound Load balancer for Secure Cluster Connectivity (No Public IP) workspace. - :type load_balancer_id: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param nat_gateway_name: Name of the NAT gateway for Secure Cluster Connectivity (No Public IP) + :vartype load_balancer_id: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar nat_gateway_name: Name of the NAT gateway for Secure Cluster Connectivity (No Public IP) workspace subnets. - :type nat_gateway_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param public_ip_name: Name of the Public IP for No Public IP workspace with managed vNet. - :type public_ip_name: ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param prepare_encryption: Prepare the workspace for encryption. Enables the Managed Identity + :vartype nat_gateway_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar public_ip_name: Name of the Public IP for No Public IP workspace with managed vNet. + :vartype public_ip_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar prepare_encryption: Prepare the workspace for encryption. Enables the Managed Identity for managed storage account. - :type prepare_encryption: - ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter - :param encryption: Contains the encryption details for Customer-Managed Key (CMK) enabled + :vartype prepare_encryption: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :ivar encryption: Contains the encryption details for Customer-Managed Key (CMK) enabled workspace. - :type encryption: ~azure_databricks_management_client.models.WorkspaceEncryptionParameter - :param require_infrastructure_encryption: A boolean indicating whether or not the DBFS root - file system will be enabled with secondary layer of encryption with platform managed keys for - data at rest. - :type require_infrastructure_encryption: - ~azure_databricks_management_client.models.WorkspaceCustomBooleanParameter - :param storage_account_name: Default DBFS storage account name. - :type storage_account_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param storage_account_sku_name: Storage account SKU name, ex: Standard_GRS, Standard_LRS. - Refer https://aka.ms/storageskus for valid inputs. - :type storage_account_sku_name: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter - :param vnet_address_prefix: Address prefix for Managed virtual network.
Default value for this + :vartype encryption: ~azure.mgmt.databricks.models.WorkspaceEncryptionParameter + :ivar require_infrastructure_encryption: A boolean indicating whether or not the DBFS root file + system will be enabled with secondary layer of encryption with platform managed keys for data + at rest. + :vartype require_infrastructure_encryption: + ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :ivar storage_account_name: Default DBFS storage account name. + :vartype storage_account_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar storage_account_sku_name: Storage account SKU name, ex: Standard_GRS, Standard_LRS. Refer + https://aka.ms/storageskus for valid inputs. + :vartype storage_account_sku_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :ivar vnet_address_prefix: Address prefix for Managed virtual network. Default value for this input is 10.139. - :type vnet_address_prefix: - ~azure_databricks_management_client.models.WorkspaceCustomStringParameter + :vartype vnet_address_prefix: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter :ivar resource_tags: Tags applied to resources under Managed resource group. These can be updated by updating tags at workspace level. - :vartype resource_tags: - ~azure_databricks_management_client.models.WorkspaceCustomObjectParameter + :vartype resource_tags: ~azure.mgmt.databricks.models.WorkspaceCustomObjectParameter """ _validation = { - 'resource_tags': {'readonly': True}, + "resource_tags": {"readonly": True}, } _attribute_map = { - 'aml_workspace_id': {'key': 'amlWorkspaceId', 'type': 'WorkspaceCustomStringParameter'}, - 'custom_virtual_network_id': {'key': 'customVirtualNetworkId', 'type': 'WorkspaceCustomStringParameter'}, - 'custom_public_subnet_name': {'key': 'customPublicSubnetName', 'type': 'WorkspaceCustomStringParameter'}, - 'custom_private_subnet_name': {'key': 'customPrivateSubnetName', 'type': 'WorkspaceCustomStringParameter'}, - 'enable_no_public_ip': {'key': 'enableNoPublicIp', 'type': 'WorkspaceCustomBooleanParameter'}, - 'load_balancer_backend_pool_name': {'key': 'loadBalancerBackendPoolName', 'type': 'WorkspaceCustomStringParameter'}, - 'load_balancer_id': {'key': 'loadBalancerId', 'type': 'WorkspaceCustomStringParameter'}, - 'nat_gateway_name': {'key': 'natGatewayName', 'type': 'WorkspaceCustomStringParameter'}, - 'public_ip_name': {'key': 'publicIpName', 'type': 'WorkspaceCustomStringParameter'}, - 'prepare_encryption': {'key': 'prepareEncryption', 'type': 'WorkspaceCustomBooleanParameter'}, - 'encryption': {'key': 'encryption', 'type': 'WorkspaceEncryptionParameter'}, - 'require_infrastructure_encryption': {'key': 'requireInfrastructureEncryption', 'type': 'WorkspaceCustomBooleanParameter'}, - 'storage_account_name': {'key': 'storageAccountName', 'type': 'WorkspaceCustomStringParameter'}, - 'storage_account_sku_name': {'key': 'storageAccountSkuName', 'type': 'WorkspaceCustomStringParameter'}, - 'vnet_address_prefix': {'key': 'vnetAddressPrefix', 'type': 'WorkspaceCustomStringParameter'}, - 'resource_tags': {'key': 'resourceTags', 'type': 'WorkspaceCustomObjectParameter'}, + "aml_workspace_id": {"key": "amlWorkspaceId", "type": "WorkspaceCustomStringParameter"}, + "custom_virtual_network_id": {"key": "customVirtualNetworkId", "type": "WorkspaceCustomStringParameter"}, + "custom_public_subnet_name": {"key": "customPublicSubnetName", "type": "WorkspaceCustomStringParameter"}, + "custom_private_subnet_name": {"key": "customPrivateSubnetName", "type": 
"WorkspaceCustomStringParameter"}, + "enable_no_public_ip": {"key": "enableNoPublicIp", "type": "WorkspaceCustomBooleanParameter"}, + "load_balancer_backend_pool_name": { + "key": "loadBalancerBackendPoolName", + "type": "WorkspaceCustomStringParameter", + }, + "load_balancer_id": {"key": "loadBalancerId", "type": "WorkspaceCustomStringParameter"}, + "nat_gateway_name": {"key": "natGatewayName", "type": "WorkspaceCustomStringParameter"}, + "public_ip_name": {"key": "publicIpName", "type": "WorkspaceCustomStringParameter"}, + "prepare_encryption": {"key": "prepareEncryption", "type": "WorkspaceCustomBooleanParameter"}, + "encryption": {"key": "encryption", "type": "WorkspaceEncryptionParameter"}, + "require_infrastructure_encryption": { + "key": "requireInfrastructureEncryption", + "type": "WorkspaceCustomBooleanParameter", + }, + "storage_account_name": {"key": "storageAccountName", "type": "WorkspaceCustomStringParameter"}, + "storage_account_sku_name": {"key": "storageAccountSkuName", "type": "WorkspaceCustomStringParameter"}, + "vnet_address_prefix": {"key": "vnetAddressPrefix", "type": "WorkspaceCustomStringParameter"}, + "resource_tags": {"key": "resourceTags", "type": "WorkspaceCustomObjectParameter"}, } def __init__( self, *, - aml_workspace_id: Optional["WorkspaceCustomStringParameter"] = None, - custom_virtual_network_id: Optional["WorkspaceCustomStringParameter"] = None, - custom_public_subnet_name: Optional["WorkspaceCustomStringParameter"] = None, - custom_private_subnet_name: Optional["WorkspaceCustomStringParameter"] = None, - enable_no_public_ip: Optional["WorkspaceCustomBooleanParameter"] = None, - load_balancer_backend_pool_name: Optional["WorkspaceCustomStringParameter"] = None, - load_balancer_id: Optional["WorkspaceCustomStringParameter"] = None, - nat_gateway_name: Optional["WorkspaceCustomStringParameter"] = None, - public_ip_name: Optional["WorkspaceCustomStringParameter"] = None, - prepare_encryption: Optional["WorkspaceCustomBooleanParameter"] = None, - encryption: Optional["WorkspaceEncryptionParameter"] = None, - require_infrastructure_encryption: Optional["WorkspaceCustomBooleanParameter"] = None, - storage_account_name: Optional["WorkspaceCustomStringParameter"] = None, - storage_account_sku_name: Optional["WorkspaceCustomStringParameter"] = None, - vnet_address_prefix: Optional["WorkspaceCustomStringParameter"] = None, + aml_workspace_id: Optional["_models.WorkspaceCustomStringParameter"] = None, + custom_virtual_network_id: Optional["_models.WorkspaceCustomStringParameter"] = None, + custom_public_subnet_name: Optional["_models.WorkspaceCustomStringParameter"] = None, + custom_private_subnet_name: Optional["_models.WorkspaceCustomStringParameter"] = None, + enable_no_public_ip: Optional["_models.WorkspaceCustomBooleanParameter"] = None, + load_balancer_backend_pool_name: Optional["_models.WorkspaceCustomStringParameter"] = None, + load_balancer_id: Optional["_models.WorkspaceCustomStringParameter"] = None, + nat_gateway_name: Optional["_models.WorkspaceCustomStringParameter"] = None, + public_ip_name: Optional["_models.WorkspaceCustomStringParameter"] = None, + prepare_encryption: Optional["_models.WorkspaceCustomBooleanParameter"] = None, + encryption: Optional["_models.WorkspaceEncryptionParameter"] = None, + require_infrastructure_encryption: Optional["_models.WorkspaceCustomBooleanParameter"] = None, + storage_account_name: Optional["_models.WorkspaceCustomStringParameter"] = None, + storage_account_sku_name: 
Optional["_models.WorkspaceCustomStringParameter"] = None, + vnet_address_prefix: Optional["_models.WorkspaceCustomStringParameter"] = None, **kwargs ): - super(WorkspaceCustomParameters, self).__init__(**kwargs) + """ + :keyword aml_workspace_id: The ID of a Azure Machine Learning workspace to link with Databricks + workspace. + :paramtype aml_workspace_id: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword custom_virtual_network_id: The ID of a Virtual Network where this Databricks Cluster + should be created. + :paramtype custom_virtual_network_id: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword custom_public_subnet_name: The name of a Public Subnet within the Virtual Network. + :paramtype custom_public_subnet_name: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword custom_private_subnet_name: The name of the Private Subnet within the Virtual Network. + :paramtype custom_private_subnet_name: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword enable_no_public_ip: Should the Public IP be Disabled?. + :paramtype enable_no_public_ip: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :keyword load_balancer_backend_pool_name: Name of the outbound Load Balancer Backend Pool for + Secure Cluster Connectivity (No Public IP). + :paramtype load_balancer_backend_pool_name: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword load_balancer_id: Resource URI of Outbound Load balancer for Secure Cluster + Connectivity (No Public IP) workspace. + :paramtype load_balancer_id: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword nat_gateway_name: Name of the NAT gateway for Secure Cluster Connectivity (No Public + IP) workspace subnets. + :paramtype nat_gateway_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword public_ip_name: Name of the Public IP for No Public IP workspace with managed vNet. + :paramtype public_ip_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword prepare_encryption: Prepare the workspace for encryption. Enables the Managed Identity + for managed storage account. + :paramtype prepare_encryption: ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :keyword encryption: Contains the encryption details for Customer-Managed Key (CMK) enabled + workspace. + :paramtype encryption: ~azure.mgmt.databricks.models.WorkspaceEncryptionParameter + :keyword require_infrastructure_encryption: A boolean indicating whether or not the DBFS root + file system will be enabled with secondary layer of encryption with platform managed keys for + data at rest. + :paramtype require_infrastructure_encryption: + ~azure.mgmt.databricks.models.WorkspaceCustomBooleanParameter + :keyword storage_account_name: Default DBFS storage account name. + :paramtype storage_account_name: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword storage_account_sku_name: Storage account SKU name, ex: Standard_GRS, Standard_LRS. + Refer https://aka.ms/storageskus for valid inputs. + :paramtype storage_account_sku_name: + ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + :keyword vnet_address_prefix: Address prefix for Managed virtual network. Default value for + this input is 10.139. 
+ :paramtype vnet_address_prefix: ~azure.mgmt.databricks.models.WorkspaceCustomStringParameter + """ + super().__init__(**kwargs) self.aml_workspace_id = aml_workspace_id self.custom_virtual_network_id = custom_virtual_network_id self.custom_public_subnet_name = custom_public_subnet_name @@ -1393,178 +2021,177 @@ def __init__( self.resource_tags = None -class WorkspaceCustomStringParameter(msrest.serialization.Model): +class WorkspaceCustomStringParameter(_serialization.Model): """The Value. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", + :ivar type: The type of variable that this is. Known values are: "Bool", "Object", and "String". - :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType - :param value: Required. The value which should be used for this field. - :type value: str + :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :ivar value: The value which should be used for this field. Required. + :vartype value: str """ _validation = { - 'type': {'readonly': True}, - 'value': {'required': True}, + "type": {"readonly": True}, + "value": {"required": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - *, - value: str, - **kwargs - ): - super(WorkspaceCustomStringParameter, self).__init__(**kwargs) + def __init__(self, *, value: str, **kwargs): + """ + :keyword value: The value which should be used for this field. Required. + :paramtype value: str + """ + super().__init__(**kwargs) self.type = None self.value = value -class WorkspaceEncryptionParameter(msrest.serialization.Model): +class WorkspaceEncryptionParameter(_serialization.Model): """The object that contains details of encryption used on the workspace. Variables are only populated by the server, and will be ignored when sending a request. - :ivar type: The type of variable that this is. Possible values include: "Bool", "Object", + :ivar type: The type of variable that this is. Known values are: "Bool", "Object", and "String". - :vartype type: str or ~azure_databricks_management_client.models.CustomParameterType - :param value: The value which should be used for this field. - :type value: ~azure_databricks_management_client.models.Encryption + :vartype type: str or ~azure.mgmt.databricks.models.CustomParameterType + :ivar value: The value which should be used for this field. + :vartype value: ~azure.mgmt.databricks.models.Encryption """ _validation = { - 'type': {'readonly': True}, + "type": {"readonly": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'Encryption'}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "Encryption"}, } - def __init__( - self, - *, - value: Optional["Encryption"] = None, - **kwargs - ): - super(WorkspaceEncryptionParameter, self).__init__(**kwargs) + def __init__(self, *, value: Optional["_models.Encryption"] = None, **kwargs): + """ + :keyword value: The value which should be used for this field. 
+ :paramtype value: ~azure.mgmt.databricks.models.Encryption + """ + super().__init__(**kwargs) self.type = None self.value = value -class WorkspaceListResult(msrest.serialization.Model): +class WorkspaceListResult(_serialization.Model): """List of workspaces. - :param value: The array of workspaces. - :type value: list[~azure_databricks_management_client.models.Workspace] - :param next_link: The URL to use for getting the next set of results. - :type next_link: str + :ivar value: The array of workspaces. + :vartype value: list[~azure.mgmt.databricks.models.Workspace] + :ivar next_link: The URL to use for getting the next set of results. + :vartype next_link: str """ _attribute_map = { - 'value': {'key': 'value', 'type': '[Workspace]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[Workspace]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - *, - value: Optional[List["Workspace"]] = None, - next_link: Optional[str] = None, - **kwargs - ): - super(WorkspaceListResult, self).__init__(**kwargs) + def __init__(self, *, value: Optional[List["_models.Workspace"]] = None, next_link: Optional[str] = None, **kwargs): + """ + :keyword value: The array of workspaces. + :paramtype value: list[~azure.mgmt.databricks.models.Workspace] + :keyword next_link: The URL to use for getting the next set of results. + :paramtype next_link: str + """ + super().__init__(**kwargs) self.value = value self.next_link = next_link -class WorkspacePropertiesEncryption(msrest.serialization.Model): +class WorkspacePropertiesEncryption(_serialization.Model): """Encryption properties for databricks workspace. All required parameters must be populated in order to send to Azure. - :param entities: Required. Encryption entities definition for the workspace. - :type entities: ~azure_databricks_management_client.models.EncryptionEntitiesDefinition + :ivar entities: Encryption entities definition for the workspace. Required. + :vartype entities: ~azure.mgmt.databricks.models.EncryptionEntitiesDefinition """ _validation = { - 'entities': {'required': True}, + "entities": {"required": True}, } _attribute_map = { - 'entities': {'key': 'entities', 'type': 'EncryptionEntitiesDefinition'}, + "entities": {"key": "entities", "type": "EncryptionEntitiesDefinition"}, } - def __init__( - self, - *, - entities: "EncryptionEntitiesDefinition", - **kwargs - ): - super(WorkspacePropertiesEncryption, self).__init__(**kwargs) + def __init__(self, *, entities: "_models.EncryptionEntitiesDefinition", **kwargs): + """ + :keyword entities: Encryption entities definition for the workspace. Required. + :paramtype entities: ~azure.mgmt.databricks.models.EncryptionEntitiesDefinition + """ + super().__init__(**kwargs) self.entities = entities -class WorkspaceProviderAuthorization(msrest.serialization.Model): +class WorkspaceProviderAuthorization(_serialization.Model): """The workspace provider authorization. All required parameters must be populated in order to send to Azure. - :param principal_id: Required. The provider's principal identifier. This is the identity that - the provider will use to call ARM to manage the workspace resources. - :type principal_id: str - :param role_definition_id: Required. The provider's role definition identifier. This role will - define all the permissions that the provider must have on the workspace's container resource - group. This role definition cannot have permission to delete the resource group. 
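A short sketch of the two required keywords; both GUIDs below are placeholders:

    from azure.mgmt.databricks.models import WorkspaceProviderAuthorization

    # principal_id identifies the provider's identity; role_definition_id scopes
    # what that identity may do in the workspace's managed resource group.
    auth = WorkspaceProviderAuthorization(
        principal_id="00000000-0000-0000-0000-000000000000",
        role_definition_id="11111111-1111-1111-1111-111111111111",
    )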
- :type role_definition_id: str + :ivar principal_id: The provider's principal identifier. This is the identity that the provider + will use to call ARM to manage the workspace resources. Required. + :vartype principal_id: str + :ivar role_definition_id: The provider's role definition identifier. This role will define all + the permissions that the provider must have on the workspace's container resource group. This + role definition cannot have permission to delete the resource group. Required. + :vartype role_definition_id: str """ _validation = { - 'principal_id': {'required': True}, - 'role_definition_id': {'required': True}, + "principal_id": {"required": True}, + "role_definition_id": {"required": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'role_definition_id': {'key': 'roleDefinitionId', 'type': 'str'}, + "principal_id": {"key": "principalId", "type": "str"}, + "role_definition_id": {"key": "roleDefinitionId", "type": "str"}, } - def __init__( - self, - *, - principal_id: str, - role_definition_id: str, - **kwargs - ): - super(WorkspaceProviderAuthorization, self).__init__(**kwargs) + def __init__(self, *, principal_id: str, role_definition_id: str, **kwargs): + """ + :keyword principal_id: The provider's principal identifier. This is the identity that the + provider will use to call ARM to manage the workspace resources. Required. + :paramtype principal_id: str + :keyword role_definition_id: The provider's role definition identifier. This role will define + all the permissions that the provider must have on the workspace's container resource group. + This role definition cannot have permission to delete the resource group. Required. + :paramtype role_definition_id: str + """ + super().__init__(**kwargs) self.principal_id = principal_id self.role_definition_id = role_definition_id -class WorkspaceUpdate(msrest.serialization.Model): +class WorkspaceUpdate(_serialization.Model): """An update to a workspace. - :param tags: A set of tags. Resource tags. - :type tags: dict[str, str] + :ivar tags: Resource tags. + :vartype tags: dict[str, str] """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - *, - tags: Optional[Dict[str, str]] = None, - **kwargs - ): - super(WorkspaceUpdate, self).__init__(**kwargs) + def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs): + """ + :keyword tags: Resource tags. + :paramtype tags: dict[str, str] + """ + super().__init__(**kwargs) self.tags = tags diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_patch.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/models/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py index 6fc97620650a..6de22f808b13 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/__init__.py @@ -10,12 +10,22 @@ from ._operations import Operations from ._private_link_resources_operations import PrivateLinkResourcesOperations from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations +from ._outbound_network_dependencies_endpoints_operations import OutboundNetworkDependenciesEndpointsOperations from ._vnet_peering_operations import VNetPeeringOperations +from ._access_connectors_operations import AccessConnectorsOperations + +from ._patch import __all__ as _patch_all +from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'WorkspacesOperations', - 'Operations', - 'PrivateLinkResourcesOperations', - 'PrivateEndpointConnectionsOperations', - 'VNetPeeringOperations', + "WorkspacesOperations", + "Operations", + "PrivateLinkResourcesOperations", + "PrivateEndpointConnectionsOperations", + "OutboundNetworkDependenciesEndpointsOperations", + "VNetPeeringOperations", + "AccessConnectorsOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_access_connectors_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_access_connectors_operations.py new file mode 100644 index 000000000000..c0058ae6238b --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_access_connectors_operations.py @@ -0,0 +1,1058 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import sys +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = 
case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, connector_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "connectorName": _SERIALIZER.url("connector_name", connector_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", 
"application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/accessConnectors") + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class AccessConnectorsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.AzureDatabricksManagementClient`'s + :attr:`access_connectors` attribute. + """ + + models = _models + + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> _models.AccessConnector: + """Gets an azure databricks accessConnector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param connector_name: The name of the azure databricks accessConnector. Required. 
+ :type connector_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: AccessConnector or the result of cls(response) + :rtype: ~azure.mgmt.databricks.models.AccessConnector + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnector] + + request = build_get_request( + resource_group_name=resource_group_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("AccessConnector", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, connector_name: str, **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, connector_name: str, **kwargs: Any) -> LROPoller[None]: + """Deletes the azure databricks accessConnector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param connector_name: The name of the azure databricks accessConnector. Required. + :type connector_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._delete_initial( # type: ignore + resource_group_name=resource_group_name, + connector_name=connector_name, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + connector_name: str, + parameters: Union[_models.AccessConnector, IO], + **kwargs: Any + ) -> _models.AccessConnector: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or 
{}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnector] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "AccessConnector") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if response.status_code == 200: + deserialized = self._deserialize("AccessConnector", pipeline_response) + + if response.status_code == 201: + deserialized = self._deserialize("AccessConnector", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + connector_name: str, + parameters: _models.AccessConnector, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AccessConnector]: + """Creates or updates azure databricks accessConnector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param connector_name: The name of the azure databricks accessConnector. Required. + :type connector_name: str + :param parameters: Parameters supplied to the create or update an azure databricks + accessConnector. Required. + :type parameters: ~azure.mgmt.databricks.models.AccessConnector + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either AccessConnector or the result of
+         cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @overload
+    def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        connector_name: str,
+        parameters: IO,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> LROPoller[_models.AccessConnector]:
+        """Creates or updates an azure databricks accessConnector.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param connector_name: The name of the azure databricks accessConnector. Required.
+        :type connector_name: str
+        :param parameters: Parameters supplied to create or update an azure databricks
+         accessConnector. Required.
+        :type parameters: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either AccessConnector or the result of
+         cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        connector_name: str,
+        parameters: Union[_models.AccessConnector, IO],
+        **kwargs: Any
+    ) -> LROPoller[_models.AccessConnector]:
+        """Creates or updates an azure databricks accessConnector.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param connector_name: The name of the azure databricks accessConnector. Required.
+        :type connector_name: str
+        :param parameters: Parameters supplied to create or update an azure databricks
+         accessConnector. Is either a model type or an IO type. Required.
+        :type parameters: ~azure.mgmt.databricks.models.AccessConnector or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AccessConnector or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnector] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._create_or_update_initial( # type: ignore + resource_group_name=resource_group_name, + connector_name=connector_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AccessConnector", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + def _update_initial( + self, + resource_group_name: str, + connector_name: str, + parameters: Union[_models.AccessConnectorUpdate, IO], + **kwargs: Any + ) -> Optional[_models.AccessConnector]: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.AccessConnector]] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "AccessConnectorUpdate") + + request = build_update_request( + 
resource_group_name=resource_group_name, + connector_name=connector_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize("AccessConnector", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + connector_name: str, + parameters: _models.AccessConnectorUpdate, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AccessConnector]: + """Updates an azure databricks accessConnector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param connector_name: The name of the azure databricks accessConnector. Required. + :type connector_name: str + :param parameters: The update to the azure databricks accessConnector. Required. + :type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either AccessConnector or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + connector_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.AccessConnector]: + """Updates an azure databricks accessConnector. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param connector_name: The name of the azure databricks accessConnector. Required. 
+        :type connector_name: str
+        :param parameters: The update to the azure databricks accessConnector. Required.
+        :type parameters: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either AccessConnector or the result of
+         cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def begin_update(
+        self,
+        resource_group_name: str,
+        connector_name: str,
+        parameters: Union[_models.AccessConnectorUpdate, IO],
+        **kwargs: Any
+    ) -> LROPoller[_models.AccessConnector]:
+        """Updates an azure databricks accessConnector.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param connector_name: The name of the azure databricks accessConnector. Required.
+        :type connector_name: str
+        :param parameters: The update to the azure databricks accessConnector. Is either a model type
+         or an IO type. Required.
+        :type parameters: ~azure.mgmt.databricks.models.AccessConnectorUpdate or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
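+
+        A minimal usage sketch (editor's addition, not generated code; the credential,
+        subscription ID, and resource names are placeholders, and the dict body follows
+        the same convention as this package's generated samples)::
+
+            from azure.identity import DefaultAzureCredential
+            from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+            # Build the management client; DefaultAzureCredential resolves auth
+            # from the environment (assumes azure-identity is installed).
+            client = AzureDatabricksManagementClient(
+                credential=DefaultAzureCredential(),
+                subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
+            )
+            # begin_update returns an LROPoller; .result() blocks until done.
+            connector = client.access_connectors.begin_update(
+                resource_group_name="rg",  # placeholder resource group
+                connector_name="myAccessConnector",  # placeholder connector name
+                parameters={"tags": {"env": "dev"}},  # serialized as AccessConnectorUpdate
+            ).result()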
+ :return: An instance of LROPoller that returns either AccessConnector or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnector] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] + if cont_token is None: + raw_result = self._update_initial( # type: ignore + resource_group_name=resource_group_name, + connector_name=connector_name, + parameters=parameters, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize("AccessConnector", pipeline_response) + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors/{connectorName}"} # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.AccessConnector"]: + """Gets all the azure databricks accessConnectors within a resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AccessConnector or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnectorListResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("AccessConnectorListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/accessConnectors"} # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.AccessConnector"]: + """Gets all the azure databricks accessConnectors within a subscription. 
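+
+        A minimal usage sketch (editor's addition, not generated code; credential and
+        subscription ID are placeholders)::
+
+            from azure.identity import DefaultAzureCredential
+            from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+            client = AzureDatabricksManagementClient(
+                credential=DefaultAzureCredential(),
+                subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
+            )
+            # ItemPaged transparently follows next_link across result pages.
+            for connector in client.access_connectors.list_by_subscription():
+                print(connector.name)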
+ + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of either AccessConnector or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.AccessConnector] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.AccessConnectorListResult] + + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize("AccessConnectorListResult", pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/accessConnectors"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py index 311dc36ebf05..57b65b67cf36 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
@@ -5,87 +6,138 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar +def build_list_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") -class Operations(object): - """Operations operations. + # Construct URL + _url = kwargs.pop("template_url", "/providers/Microsoft.Databricks/operations") - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure_databricks_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.AzureDatabricksManagementClient`'s + :attr:`operations` attribute. 
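+
+    A minimal usage sketch (editor's addition, not generated code; credential and
+    subscription ID are placeholders)::
+
+        from azure.identity import DefaultAzureCredential
+        from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+        client = AzureDatabricksManagementClient(
+            credential=DefaultAzureCredential(),
+            subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
+        )
+        # Enumerate the resource provider's available operations.
+        for op in client.operations.list():
+            print(op.name)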
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config - - def list( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.OperationListResult"] + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: """Lists all of the available RP operations. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OperationListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.OperationListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Operation or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.OperationListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - request = self._client.get(url, query_parameters, header_parameters) + request = build_list_request( + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('OperationListResult', pipeline_response) + deserialized = self._deserialize("OperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -94,17 +146,18 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/providers/Microsoft.Databricks/operations'} # type: ignore + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/providers/Microsoft.Databricks/operations"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_outbound_network_dependencies_endpoints_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_outbound_network_dependencies_endpoints_operations.py new file mode 100644 index 000000000000..f0ba932cb558 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_outbound_network_dependencies_endpoints_operations.py @@ -0,0 +1,163 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import sys +from typing import Any, Callable, Dict, List, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat + +from .. 
import models as _models
+from .._serialization import Serializer
+from .._vendor import _convert_request, _format_url_section
+
+if sys.version_info >= (3, 8):
+    from typing import Literal  # pylint: disable=no-name-in-module, ungrouped-imports
+else:
+    from typing_extensions import Literal  # type: ignore  # pylint: disable=ungrouped-imports
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+_SERIALIZER = Serializer()
+_SERIALIZER.client_side_validation = False
+
+
+def build_list_request(
+    resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any
+) -> HttpRequest:
+    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+    api_version = kwargs.pop(
+        "api_version", _params.pop("api-version", "2022-04-01-preview")
+    )  # type: Literal["2022-04-01-preview"]
+    accept = _headers.pop("Accept", "application/json")
+
+    # Construct URL
+    _url = kwargs.pop(
+        "template_url",
+        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints",
+    )  # pylint: disable=line-too-long
+    path_format_arguments = {
+        "resourceGroupName": _SERIALIZER.url(
+            "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$"
+        ),
+        "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3),
+        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
+    }
+
+    _url = _format_url_section(_url, **path_format_arguments)
+
+    # Construct parameters
+    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
+
+    # Construct headers
+    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
+
+    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
+
+
+class OutboundNetworkDependenciesEndpointsOperations:
+    """
+    .. warning::
+        **DO NOT** instantiate this class directly.
+
+    Instead, you should access the following operations through
+    :class:`~azure.mgmt.databricks.AzureDatabricksManagementClient`'s
+    :attr:`outbound_network_dependencies_endpoints` attribute.
+    """
+
+    models = _models
+
+    def __init__(self, *args, **kwargs):
+        input_args = list(args)
+        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+    @distributed_trace
+    def list(
+        self, resource_group_name: str, workspace_name: str, **kwargs: Any
+    ) -> List[_models.OutboundEnvironmentEndpoint]:
+        """Gets a list of egress endpoints (network endpoints of all outbound dependencies) in the
+        specified Workspace.
+
+        Gets the list of endpoints that a VNet-injected workspace calls to reach the Azure Databricks
+        control plane. You must configure outbound access with these endpoints. For more information,
+        see
+        https://docs.microsoft.com/en-us/azure/databricks/administration-guide/cloud-configurations/azure/udr.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace. Required.
+ :type workspace_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: list of OutboundEnvironmentEndpoint or the result of cls(response) + :rtype: list[~azure.mgmt.databricks.models.OutboundEnvironmentEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[List[_models.OutboundEnvironmentEndpoint]] + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + deserialized = self._deserialize("[OutboundEnvironmentEndpoint]", pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_patch.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py index 9d91ad76eb51..efe739a64ad2 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_endpoint_connections_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,103 +6,296 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return 
HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_request( + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", 
url=_url, params=_params, headers=_headers, **kwargs) + + +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PrivateEndpointConnectionsOperations(object): - """PrivateEndpointConnectionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure_databricks_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.AzureDatabricksManagementClient`'s + :attr:`private_endpoint_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PrivateEndpointConnectionsList"] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Iterable["_models.PrivateEndpointConnection"]: """List private endpoint connections. List private endpoint connections of the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointConnectionsList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.PrivateEndpointConnectionsList] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either PrivateEndpointConnection or the result of + cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionsList"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnectionsList] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: 
ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('PrivateEndpointConnectionsList', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnectionsList", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -110,343 +304,452 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections'} # type: ignore + return ItemPaged(get_next, extract_data) + + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections"} # type: ignore + @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.PrivateEndpointConnection" + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Get private endpoint connection. Get a private endpoint connection properties for a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection. + :param private_endpoint_connection_name: The name of the private endpoint connection. Required. 
:type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) - :rtype: ~azure_databricks_management_client.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.databricks.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnection', 
pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore def _create_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - private_endpoint_connection, # type: "_models.PrivateEndpointConnection" - **kwargs # type: Any - ): - # type: (...) -> "_models.PrivateEndpointConnection" - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO], + **kwargs: Any + ) -> _models.PrivateEndpointConnection: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(private_endpoint_connection, 'PrivateEndpointConnection') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + 
cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(private_endpoint_connection, (IO, bytes)): + _content = private_endpoint_connection + else: + _json = self._serialize.body(private_endpoint_connection, "PrivateEndpointConnection") + + request = build_create_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if response.status_code == 202: - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + _create_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + @overload def begin_create( self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - private_endpoint_connection, # type: "_models.PrivateEndpointConnection" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.PrivateEndpointConnection"] + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + private_endpoint_connection: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PrivateEndpointConnection]: """Update private endpoint connection status. Update the status of a private endpoint connection with the specified name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection. 
+ :param private_endpoint_connection_name: The name of the private endpoint connection. Required. :type private_endpoint_connection_name: str :param private_endpoint_connection: The private endpoint connection with updated properties. - :type private_endpoint_connection: ~azure_databricks_management_client.models.PrivateEndpointConnection + Required. + :type private_endpoint_connection: ~azure.mgmt.databricks.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either PrivateEndpointConnection or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure_databricks_management_client.models.PrivateEndpointConnection] + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either PrivateEndpointConnection or the result + of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + def begin_create( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + private_endpoint_connection: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PrivateEndpointConnection]: + """Update private endpoint connection status. + + Update the status of a private endpoint connection with the specified name. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection. Required. + :type private_endpoint_connection_name: str + :param private_endpoint_connection: The private endpoint connection with updated properties. + Required. + :type private_endpoint_connection: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+ operation to not poll, or pass in your own initialized polling object for a personal polling
+ strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
+ of cls(response)
+ :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection]
+ :raises ~azure.core.exceptions.HttpResponseError:
+ """
+
+ @distributed_trace
+ def begin_create(
+ self,
+ resource_group_name: str,
+ workspace_name: str,
+ private_endpoint_connection_name: str,
+ private_endpoint_connection: Union[_models.PrivateEndpointConnection, IO],
+ **kwargs: Any
+ ) -> LROPoller[_models.PrivateEndpointConnection]:
+ """Update private endpoint connection status.
+
+ Update the status of a private endpoint connection with the specified name.
+
+ :param resource_group_name: The name of the resource group. The name is case insensitive.
+ Required.
+ :type resource_group_name: str
+ :param workspace_name: The name of the workspace. Required.
+ :type workspace_name: str
+ :param private_endpoint_connection_name: The name of the private endpoint connection. Required.
+ :type private_endpoint_connection_name: str
+ :param private_endpoint_connection: The private endpoint connection with updated properties. Is
+ either a model type or an IO type. Required.
+ :type private_endpoint_connection: ~azure.mgmt.databricks.models.PrivateEndpointConnection or
+ IO
+ :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+ Default value is None.
+ :paramtype content_type: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+ :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+ operation to not poll, or pass in your own initialized polling object for a personal polling
+ strategy.
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod
+ :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+ Retry-After header is present.
+ :return: An instance of LROPoller that returns either PrivateEndpointConnection or the result + of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateEndpointConnection] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = self._create_initial( + raw_result = self._create_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, private_endpoint_connection_name=private_endpoint_connection_name, private_endpoint_connection=private_endpoint_connection, - cls=lambda x,y,z: x, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) - + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - def _delete_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - 
private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + begin_create.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + @distributed_trace def begin_delete( - self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> LROPoller[None]: """Remove private endpoint connection. Remove private endpoint connection with the specified name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param private_endpoint_connection_name: The name of the private endpoint connection. + :param private_endpoint_connection_name: The name of the private endpoint connection. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, private_endpoint_connection_name=private_endpoint_connection_name, - cls=lambda x,y,z: x, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) + kwargs.pop("error_map", None) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore 
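For orientation, a minimal usage sketch of the regenerated synchronous operations follows. It assumes the operation group is exposed as the client's private_endpoint_connections attribute (mirroring the private_link_resources and vnet_peering attributes documented later in this patch), authentication via DefaultAzureCredential from the separate azure-identity package, and placeholder subscription, resource group, workspace, and connection names throughout:

from azure.identity import DefaultAzureCredential
from azure.mgmt.databricks import AzureDatabricksManagementClient

# Placeholder identifiers; substitute real values.
client = AzureDatabricksManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# Direct call served by get() above; raises HttpResponseError on a non-200 status.
connection = client.private_endpoint_connections.get(
    resource_group_name="myResourceGroup",
    workspace_name="myWorkspace",
    private_endpoint_connection_name="myConnection",
)
print(connection.name)

# begin_delete() returns an LROPoller[None]; result() blocks until the
# long-running delete completes.
client.private_endpoint_connections.begin_delete(
    resource_group_name="myResourceGroup",
    workspace_name="myWorkspace",
    private_endpoint_connection_name="myConnection",
).result()

The same pattern applies to begin_create(), which per the overloads above accepts either a PrivateEndpointConnection model or a raw IO body, with content_type controlling serialization of the latter.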
diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py index 15d242800838..f265dd955936 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_private_link_resources_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,101 +6,200 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_list_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request( + resource_group_name: str, workspace_name: str, group_id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "groupId": _SERIALIZER.url("group_id", group_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class PrivateLinkResourcesOperations: + """ + .. 
warning:: + **DO NOT** instantiate this class directly. -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class PrivateLinkResourcesOperations(object): - """PrivateLinkResourcesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure_databricks_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.AzureDatabricksManagementClient`'s + :attr:`private_link_resources` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PrivateLinkResourcesList"] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Iterable["_models.GroupIdInformation"]: """List private link resources. List private link resources for a given workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. 
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
- :return: An iterator like instance of either PrivateLinkResourcesList or the result of cls(response)
- :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.PrivateLinkResourcesList]
- :raises: ~azure.core.exceptions.HttpResponseError
+ :return: An iterator-like instance of either GroupIdInformation or the result of cls(response)
+ :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.GroupIdInformation]
+ :raises ~azure.core.exceptions.HttpResponseError:
"""
- cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourcesList"]
+ _headers = kwargs.pop("headers", {}) or {}
+ _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+ api_version = kwargs.pop(
+ "api_version", _params.pop("api-version", self._config.api_version)
+ ) # type: Literal["2022-04-01-preview"]
+ cls = kwargs.pop("cls", None) # type: ClsType[_models.PrivateLinkResourcesList]
+
error_map = {
- 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
+ 401: ClientAuthenticationError,
+ 404: ResourceNotFoundError,
+ 409: ResourceExistsError,
+ 304: ResourceNotModifiedError,
}
- error_map.update(kwargs.pop('error_map', {}))
- api_version = "2021-04-01-preview"
- accept = "application/json"
+ error_map.update(kwargs.pop("error_map", {}) or {})

def prepare_request(next_link=None):
- # Construct headers
- header_parameters = {} # type: Dict[str, Any]
- header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
-
if not next_link:
- # Construct URL
- url = self.list.metadata['url'] # type: ignore
- path_format_arguments = {
- 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
- 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
- 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
- }
- url = self._client.format_url(url, **path_format_arguments)
- # Construct parameters
- query_parameters = {} # type: Dict[str, Any]
- query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
-
- request = self._client.get(url, query_parameters, header_parameters)
+
+ request = build_list_request(
+ resource_group_name=resource_group_name,
+ workspace_name=workspace_name,
+ subscription_id=self._config.subscription_id,
+ api_version=api_version,
+ template_url=self.list.metadata["url"],
+ headers=_headers,
+ params=_params,
+ )
+ request = _convert_request(request)
+ request.url = self._client.format_url(request.url) # type: ignore
+
else:
- url = next_link
- query_parameters = {} # type: Dict[str, Any]
- request = self._client.get(url, query_parameters, header_parameters)
+ # make call to next link with the client's api-version
+ _parsed_next_link = urllib.parse.urlparse(next_link)
+ _next_request_params = case_insensitive_dict(
+ {
+ key: [urllib.parse.quote(v) for v in value]
+ for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
+ }
+ )
+ _next_request_params["api-version"] = self._config.api_version
+ request = HttpRequest(
+ "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
+ )
+ request = _convert_request(request)
+ request.url = self._client.format_url(request.url) # type: ignore
+ request.method = "GET"
return
request def extract_data(pipeline_response): - deserialized = self._deserialize('PrivateLinkResourcesList', pipeline_response) + deserialized = self._deserialize("PrivateLinkResourcesList", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -108,83 +208,87 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources'} # type: ignore + return ItemPaged(get_next, extract_data) + list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources"} # type: ignore + + @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - group_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.GroupIdInformation" + self, resource_group_name: str, workspace_name: str, group_id: str, **kwargs: Any + ) -> _models.GroupIdInformation: """Get the specified private link resource. Get the specified private link resource for the given group id (sub-resource). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param group_id: The name of the private link resource. + :param group_id: The name of the private link resource. Required. 
:type group_id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: GroupIdInformation, or the result of cls(response) - :rtype: ~azure_databricks_management_client.models.GroupIdInformation - :raises: ~azure.core.exceptions.HttpResponseError + :return: GroupIdInformation or the result of cls(response) + :rtype: ~azure.mgmt.databricks.models.GroupIdInformation + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.GroupIdInformation"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'groupId': self._serialize.url("group_id", group_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.GroupIdInformation] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + group_id=group_id, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('GroupIdInformation', pipeline_response) + deserialized = self._deserialize("GroupIdInformation", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}'} # type: ignore + + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/privateLinkResources/{groupId}"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py index ec4fb642db5d..a47f62fba2c4 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_vnet_peering_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,424 +6,702 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, workspace_name: str, peering_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "peeringName": _SERIALIZER.url("peering_name", peering_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, peering_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "peeringName": _SERIALIZER.url("peering_name", peering_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", 
url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, peering_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "peeringName": _SERIALIZER.url("peering_name", peering_name, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_workspace_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class VNetPeeringOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
-if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union - - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] - -class VNetPeeringOperations(object): - """VNetPeeringOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure_databricks_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.AzureDatabricksManagementClient`'s + :attr:`vnet_peering` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") + @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - peering_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.VirtualNetworkPeering"] + self, resource_group_name: str, workspace_name: str, peering_name: str, **kwargs: Any + ) -> Optional[_models.VirtualNetworkPeering]: """Gets the workspace vNet Peering. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param peering_name: The name of the workspace vNet peering. + :param peering_name: The name of the workspace vNet peering. Required. 
:type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: VirtualNetworkPeering, or the result of cls(response) - :rtype: ~azure_databricks_management_client.models.VirtualNetworkPeering or None - :raises: ~azure.core.exceptions.HttpResponseError + :return: VirtualNetworkPeering or None or the result of cls(response) + :rtype: ~azure.mgmt.databricks.models.VirtualNetworkPeering or None + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkPeering"]] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.VirtualNetworkPeering]] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + peering_name=peering_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: - deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response) + deserialized = 
self._deserialize("VirtualNetworkPeering", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'} # type: ignore - def _delete_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - peering_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, peering_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + peering_name=peering_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'} # type: ignore + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"} # type: ignore + @distributed_trace def begin_delete( - self, - resource_group_name, # type: str - workspace_name, # type: str - peering_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + self, resource_group_name: str, workspace_name: str, peering_name: str, **kwargs: Any + ) -> LROPoller[None]: """Deletes the workspace vNetPeering. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param peering_name: The name of the workspace vNet peering. + :param peering_name: The name of the workspace vNet peering. Required. :type peering_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
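
A short sketch of driving the poller this method returns, reusing the client constructed in the earlier sketch (names remain placeholders):

    poller = client.vnet_peering.begin_delete(
        resource_group_name="rg",
        workspace_name="myWorkspace",
        peering_name="vNetPeeringTest",
    )
    poller.result()  # blocks until the service reports completion; yields None here
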
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, peering_name=peering_name, - cls=lambda x,y,z: x, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) + kwargs.pop("error_map", None) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"} # type: ignore def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - peering_name, # type: str - 
virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering" - **kwargs # type: Any - ): - # type: (...) -> "_models.VirtualNetworkPeering" - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"] + resource_group_name: str, + workspace_name: str, + peering_name: str, + virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO], + **kwargs: Any + ) -> _models.VirtualNetworkPeering: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualNetworkPeering] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(virtual_network_peering_parameters, (IO, bytes)): + _content = virtual_network_peering_parameters + else: + _json = self._serialize.body(virtual_network_peering_parameters, "VirtualNetworkPeering") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + peering_name=peering_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = 
self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response) + deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response) + deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'} # type: ignore + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"} # type: ignore + + @overload def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - peering_name, # type: str - virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.VirtualNetworkPeering"] + resource_group_name: str, + workspace_name: str, + peering_name: str, + virtual_network_peering_parameters: _models.VirtualNetworkPeering, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.VirtualNetworkPeering]: """Creates vNet Peering for workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param peering_name: The name of the workspace vNet peering. + :param peering_name: The name of the workspace vNet peering. Required. :type peering_name: str :param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet - Peering. - :type virtual_network_peering_parameters: ~azure_databricks_management_client.models.VirtualNetworkPeering + Peering. Required. + :type virtual_network_peering_parameters: ~azure.mgmt.databricks.models.VirtualNetworkPeering + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. - :return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of cls(response) - :rtype: ~azure.core.polling.LROPoller[~azure_databricks_management_client.models.VirtualNetworkPeering] + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + peering_name: str, + virtual_network_peering_parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.VirtualNetworkPeering]: + """Creates vNet Peering for workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param peering_name: The name of the workspace vNet peering. Required. + :type peering_name: str + :param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet + Peering. Required. + :type virtual_network_peering_parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + peering_name: str, + virtual_network_peering_parameters: Union[_models.VirtualNetworkPeering, IO], + **kwargs: Any + ) -> LROPoller[_models.VirtualNetworkPeering]: + """Creates vNet Peering for workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
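
As a hedged usage illustration: the annotation accepts a VirtualNetworkPeering model or an IO body, and a plain dict following the VirtualNetworkPeering REST schema is typically accepted as well, as in the package's generated samples; the remote virtual network ARM ID below is a placeholder.

    poller = client.vnet_peering.begin_create_or_update(
        resource_group_name="rg",
        workspace_name="myWorkspace",
        peering_name="vNetPeeringTest",
        virtual_network_peering_parameters={
            "properties": {
                "allowVirtualNetworkAccess": True,
                "remoteVirtualNetwork": {
                    # Placeholder ARM resource ID of the peer virtual network.
                    "id": "/subscriptions/<sub>/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/myNetwork"
                },
            }
        },
    )
    peering = poller.result()
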
+ :type resource_group_name: str + :param workspace_name: The name of the workspace. Required. + :type workspace_name: str + :param peering_name: The name of the workspace vNet peering. Required. + :type peering_name: str + :param virtual_network_peering_parameters: Parameters supplied to the create workspace vNet + Peering. Is either a model type or a IO type. Required. + :type virtual_network_peering_parameters: ~azure.mgmt.databricks.models.VirtualNetworkPeering + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualNetworkPeering] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = self._create_or_update_initial( + raw_result = self._create_or_update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, peering_name=peering_name, virtual_network_peering_parameters=virtual_network_peering_parameters, - cls=lambda x,y,z: x, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response) - + deserialized = self._deserialize("VirtualNetworkPeering", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - 'peeringName': self._serialize.url("peering_name", peering_name, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, 
path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings/{peeringName}"} # type: ignore + @distributed_trace def list_by_workspace( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.VirtualNetworkPeeringList"] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Iterable["_models.VirtualNetworkPeering"]: """Lists the workspace vNet Peerings. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. 
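
The returned pager can be consumed as a plain iterator; a brief sketch with the same placeholder names:

    for peering in client.vnet_peering.list_by_workspace(
        resource_group_name="rg",
        workspace_name="myWorkspace",
    ):
        print(peering.name)  # each page element is a VirtualNetworkPeering
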
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either VirtualNetworkPeeringList or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.VirtualNetworkPeeringList] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either VirtualNetworkPeering or the result of + cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.VirtualNetworkPeering] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringList"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualNetworkPeeringList] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2018-04-01" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_workspace.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_workspace_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_workspace.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # 
type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('VirtualNetworkPeeringList', pipeline_response) + deserialized = self._deserialize("VirtualNetworkPeeringList", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -431,17 +710,18 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings'} # type: ignore + return ItemPaged(get_next, extract_data) + + list_by_workspace.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}/virtualNetworkPeerings"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py index dbfe20ee9e9c..8b4d689bec00 100644 --- a/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py +++ b/sdk/databricks/azure-mgmt-databricks/azure/mgmt/databricks/operations/_workspaces_operations.py @@ -1,3 +1,4 @@ +# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. @@ -5,528 +6,934 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
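
The regenerated module below replaces inline URL/header assembly with module-level build_*_request helpers that produce azure.core.rest.HttpRequest objects. A rough illustration of the pattern; these helpers are module-private, so this is shown only to clarify the flow:

    request = build_get_request(
        resource_group_name="rg",
        workspace_name="myWorkspace",
        subscription_id="00000000-0000-0000-0000-000000000000",
    )
    # request.method is "GET"; the URL already has the path parameters
    # substituted and api-version=2022-04-01-preview in the query string.
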
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING -import warnings - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import sys +from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse -from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models +from .._serialization import Serializer +from .._vendor import _convert_request, _format_url_section + +if sys.version_info >= (3, 8): + from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports +else: + from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_get_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + 
_url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "workspaceName": 
_SERIALIZER.url("workspace_name", workspace_name, "str", max_length=64, min_length=3), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces", + ) # pylint: disable=line-too-long + path_format_arguments = { + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1, pattern=r"^[-\w\._\(\)]+$" + ), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", "2022-04-01-preview") + ) # type: Literal["2022-04-01-preview"] + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces") + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url = _format_url_section(_url, **path_format_arguments) -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") -class WorkspacesOperations(object): - """WorkspacesOperations operations. + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure_databricks_management_client.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.databricks.AzureDatabricksManagementClient`'s + :attr:`workspaces` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.Workspace" + @distributed_trace + def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: """Gets the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) - :rtype: ~azure_databricks_management_client.models.Workspace - :raises: ~azure.core.exceptions.HttpResponseError + :return: Workspace or the result of cls(response) + :rtype: ~azure.mgmt.databricks.models.Workspace + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" - - # Construct URL - url = self.get.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: 
ClsType[_models.Workspace] + + request = build_get_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.get(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore - def _delete_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore + + def _delete_initial( # pylint: disable=inconsistent-return-statements + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" - - # Construct URL - url = self._delete_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + + request = build_delete_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) - request = self._client.delete(url, query_parameters, header_parameters) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore - def begin_delete( - self, - resource_group_name, # type: str - 
workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + @distributed_trace + def begin_delete(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: """Deletes the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod - :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[None] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, - cls=lambda x,y,z: x, + api_version=api_version, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) + kwargs.pop("error_map", None) - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) - - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, 
**kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore def _create_or_update_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.Workspace" - **kwargs # type: Any - ): - # type: (...) -> "_models.Workspace" - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] + self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any + ) -> _models.Workspace: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._create_or_update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'Workspace') - body_content_kwargs['content'] = body_content - request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: 
Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "Workspace") + + request = build_create_or_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore + + @overload def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.Workspace" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Workspace"] + resource_group_name: str, + workspace_name: str, + parameters: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: """Creates a new workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param parameters: Parameters supplied to the create or update a workspace. - :type parameters: ~azure_databricks_management_client.models.Workspace + :param parameters: Parameters supplied to the create or update a workspace. Required. + :type parameters: ~azure.mgmt.databricks.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+        :paramtype content_type: str
         :keyword callable cls: A custom type or function that will be passed the direct response
         :keyword str continuation_token: A continuation token to restart a poller from a saved state.
-        :keyword polling: By default, your polling method will be ARMPolling.
-         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
         :paramtype polling: bool or ~azure.core.polling.PollingMethod
-        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
         :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
-        :rtype: ~azure.core.polling.LROPoller[~azure_databricks_management_client.models.Workspace]
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
-        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Workspace"]
-        lro_delay = kwargs.pop(
-            'polling_interval',
-            self._config.polling_interval
-        )
-        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+
+    @overload
+    def begin_create_or_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        parameters: IO,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> LROPoller[_models.Workspace]:
+        """Creates a new workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace. Required.
+        :type workspace_name: str
+        :param parameters: Parameters supplied to create or update the workspace. Required.
+        :type parameters: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def begin_create_or_update(
+        self, resource_group_name: str, workspace_name: str, parameters: Union[_models.Workspace, IO], **kwargs: Any
+    ) -> LROPoller[_models.Workspace]:
+        """Creates a new workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace. Required.
+        :type workspace_name: str
+        :param parameters: Parameters supplied to create or update the workspace. Is either a model
+         type or an IO type. Required.
+        :type parameters: ~azure.mgmt.databricks.models.Workspace or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
+
+        api_version = kwargs.pop(
+            "api_version", _params.pop("api-version", self._config.api_version)
+        )  # type: Literal["2022-04-01-preview"]
+        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
+        cls = kwargs.pop("cls", None)  # type: ClsType[_models.Workspace]
+        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
         if cont_token is None:
-            raw_result = self._create_or_update_initial(
+            raw_result = self._create_or_update_initial(  # type: ignore
                 resource_group_name=resource_group_name,
                 workspace_name=workspace_name,
                 parameters=parameters,
-                cls=lambda x,y,z: x,
+                api_version=api_version,
+                content_type=content_type,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
                 **kwargs
             )
-
-        kwargs.pop('error_map', None)
-        kwargs.pop('content_type', None)
+        kwargs.pop("error_map", None)

         def get_long_running_output(pipeline_response):
-            deserialized = self._deserialize('Workspace', pipeline_response)
-
+            deserialized = self._deserialize("Workspace", pipeline_response)
             if cls:
                 return cls(pipeline_response, deserialized, {})
             return deserialized

-        path_format_arguments = {
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3),
-            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
-        }
-
-        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
+        if polling is True:
+            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
+        elif polling is False:
+            polling_method = cast(PollingMethod, NoPolling())
+        else:
+            polling_method = polling
         if cont_token:
             return
LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + + begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore def _update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.WorkspaceUpdate" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.Workspace"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + resource_group_name: str, + workspace_name: str, + parameters: Union[_models.WorkspaceUpdate, IO], + **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError - } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - content_type = kwargs.pop("content_type", "application/json") - accept = "application/json" - - # Construct URL - url = self._update_initial.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - - body_content_kwargs = {} # type: Dict[str, Any] - body_content = self._serialize.body(parameters, 'WorkspaceUpdate') - body_content_kwargs['content'] = body_content - request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.Workspace]] + + content_type = content_type or "application/json" + _json = None + _content = None + if 
isinstance(parameters, (IO, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "WorkspaceUpdate") + + request = build_update_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore + + @overload def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - parameters, # type: "_models.WorkspaceUpdate" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Workspace"] + resource_group_name: str, + workspace_name: str, + parameters: _models.WorkspaceUpdate, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: """Updates a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: The name of the workspace. + :param workspace_name: The name of the workspace. Required. :type workspace_name: str - :param parameters: The update to the workspace. - :type parameters: ~azure_databricks_management_client.models.WorkspaceUpdate + :param parameters: The update to the workspace. Required. + :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. - :keyword polling: By default, your polling method will be ARMPolling. - Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
         :paramtype polling: bool or ~azure.core.polling.PollingMethod
-        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
         :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
-        :rtype: ~azure.core.polling.LROPoller[~azure_databricks_management_client.models.Workspace]
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
         :raises ~azure.core.exceptions.HttpResponseError:
         """
-        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
-        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Workspace"]
-        lro_delay = kwargs.pop(
-            'polling_interval',
-            self._config.polling_interval
-        )
-        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
+
+    @overload
+    def begin_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        parameters: IO,
+        *,
+        content_type: str = "application/json",
+        **kwargs: Any
+    ) -> LROPoller[_models.Workspace]:
+        """Updates a workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace. Required.
+        :type workspace_name: str
+        :param parameters: The update to the workspace. Required.
+        :type parameters: IO
+        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
+         Default value is "application/json".
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
+         operation to not poll, or pass in your own initialized polling object for a personal polling
+         strategy.
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of LROPoller that returns either Workspace or the result of cls(response)
+        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace
+    def begin_update(
+        self,
+        resource_group_name: str,
+        workspace_name: str,
+        parameters: Union[_models.WorkspaceUpdate, IO],
+        **kwargs: Any
+    ) -> LROPoller[_models.Workspace]:
+        """Updates a workspace.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: The name of the workspace. Required.
+        :type workspace_name: str
+        :param parameters: The update to the workspace. Is either a model type or an IO type. Required.
+        :type parameters: ~azure.mgmt.databricks.models.WorkspaceUpdate or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] + cls = kwargs.pop("cls", None) # type: ClsType[_models.Workspace] + polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: - raw_result = self._update_initial( + raw_result = self._update_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, parameters=parameters, - cls=lambda x,y,z: x, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - - kwargs.pop('error_map', None) - kwargs.pop('content_type', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - deserialized = self._deserialize('Workspace', pipeline_response) - + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', max_length=64, min_length=3), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - - if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - else: - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}'} # type: ignore + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - def list_by_resource_group( - self, - resource_group_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> Iterable["_models.WorkspaceListResult"] + begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces/{workspaceName}"} # type: ignore + + @distributed_trace + def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Workspace"]: """Gets all the workspaces within a resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_resource_group.metadata['url'] # type: ignore - path_format_arguments = { - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_resource_group_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -535,66 +942,80 @@ def extract_data(pipeline_response): def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces'} # type: ignore + return ItemPaged(get_next, extract_data) - def list_by_subscription( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.WorkspaceListResult"] + list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Databricks/workspaces"} # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Workspace"]: """Gets all the workspaces within a subscription. 
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: ~azure.core.paging.ItemPaged[~azure_databricks_management_client.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.databricks.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version = kwargs.pop( + "api_version", _params.pop("api-version", self._config.api_version) + ) # type: Literal["2022-04-01-preview"] + cls = kwargs.pop("cls", None) # type: ClsType[_models.WorkspaceListResult] + error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) - api_version = "2021-04-01-preview" - accept = "application/json" + error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): - # Construct headers - header_parameters = {} # type: Dict[str, Any] - header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') - if not next_link: - # Construct URL - url = self.list_by_subscription.metadata['url'] # type: ignore - path_format_arguments = { - 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), - } - url = self._client.format_url(url, **path_format_arguments) - # Construct parameters - query_parameters = {} # type: Dict[str, Any] - query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') - - request = self._client.get(url, query_parameters, header_parameters) + + request = build_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + else: - url = next_link - query_parameters = {} # type: Dict[str, Any] - request = self._client.get(url, query_parameters, header_parameters) + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) # type: ignore + request.method = "GET" return request def extract_data(pipeline_response): - deserialized = self._deserialize('WorkspaceListResult', pipeline_response) + deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) @@ -603,17 +1024,18 @@ def extract_data(pipeline_response): def 
get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access + request, stream=False, **kwargs + ) response = pipeline_response.http_response if response.status_code not in [200]: - error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response) map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response - return ItemPaged( - get_next, extract_data - ) - list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces'} # type: ignore + return ItemPaged(get_next, extract_data) + + list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Databricks/workspaces"} # type: ignore diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/create_a_workspace_which_is_ready_for_customer_managed_key_(_cmk)_encryption.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_a_workspace_which_is_ready_for_customer_managed_key_(_cmk)_encryption.py new file mode 100644 index 000000000000..f059dc36f05d --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_a_workspace_which_is_ready_for_customer_managed_key_(_cmk)_encryption.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python create_a_workspace_which_is_ready_for_customer_managed_key_(_cmk)_encryption.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.workspaces.begin_create_or_update( + resource_group_name="rg", + workspace_name="myWorkspace", + parameters={ + "location": "westus", + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": {"prepareEncryption": {"value": True}}, + }, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/PrepareEncryption.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/create_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py new file mode 100644 index 000000000000..9dd29f6d39a7 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py @@ -0,0 +1,60 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python create_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.workspaces.begin_create_or_update( + resource_group_name="rg", + workspace_name="myWorkspace", + parameters={ + "location": "westus", + "properties": { + "encryption": { + "entities": { + "managedDisk": { + "keySource": "Microsoft.Keyvault", + "keyVaultProperties": { + "keyName": "test-cmk-key", + "keyVaultUri": "https://test-vault-name.vault.azure.net/", + "keyVersion": "00000000000000000000000000000000", + }, + "rotationToLatestKeyVersionEnabled": True, + } + } + }, + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + }, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceManagedDiskEncryptionCreate.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/create_an_azure_databricks_access_connector.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_an_azure_databricks_access_connector.py new file mode 100644 index 000000000000..838c00787360 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_an_azure_databricks_access_connector.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python create_an_azure_databricks_access_connector.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.access_connectors.begin_create_or_update( + resource_group_name="rg", + connector_name="myAccessConnector", + parameters={"location": "westus"}, + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/AccessConnectorCreateOrUpdate.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/create_or_update_workspace.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_or_update_workspace.py new file mode 100644 index 000000000000..53226a851a04 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_or_update_workspace.py @@ -0,0 +1,45 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python create_or_update_workspace.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.workspaces.begin_create_or_update( + resource_group_name="rg", + workspace_name="myWorkspace", + parameters={ + "location": "westus", + "properties": {"managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG"}, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceCreate.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/create_or_update_workspace_with_custom_parameters.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_or_update_workspace_with_custom_parameters.py new file mode 100644 index 000000000000..439a0d6d371d --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/create_or_update_workspace_with_custom_parameters.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python create_or_update_workspace_with_custom_parameters.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.workspaces.begin_create_or_update( + resource_group_name="rg", + workspace_name="myWorkspace", + parameters={ + "location": "westus", + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": { + "customPrivateSubnetName": {"value": "myPrivateSubnet"}, + "customPublicSubnetName": {"value": "myPublicSubnet"}, + "customVirtualNetworkId": { + "value": "/subscriptions/subid/resourceGroups/rg/providers/Microsoft.Network/virtualNetworks/myNetwork" + }, + }, + }, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceCreateWithParameters.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_a_workspace.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_a_workspace.py new file mode 100644 index 000000000000..2d47745d19fa --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_a_workspace.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python delete_a_workspace.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.workspaces.begin_delete( + resource_group_name="rg", + workspace_name="myWorkspace", + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceDelete.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_a_workspace_v_net_peering.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_a_workspace_v_net_peering.py new file mode 100644 index 000000000000..a3156d1b4242 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_a_workspace_v_net_peering.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python delete_a_workspace_v_net_peering.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.vnet_peering.begin_delete( + resource_group_name="rg", + workspace_name="myWorkspace", + peering_name="vNetPeering", + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceVirtualNetworkPeeringDelete.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_an_azure_databricks_access_connector.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_an_azure_databricks_access_connector.py new file mode 100644 index 000000000000..027147808019 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/delete_an_azure_databricks_access_connector.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python delete_an_azure_databricks_access_connector.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.access_connectors.begin_delete( + resource_group_name="rg", + connector_name="myAccessConnector", + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/AccessConnectorDelete.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/enable_customer_managed_key_(_cmk)_encryption_on_a_workspace_which_is_prepared_for_encryption.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/enable_customer_managed_key_(_cmk)_encryption_on_a_workspace_which_is_prepared_for_encryption.py new file mode 100644 index 000000000000..2889fcd30079 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/enable_customer_managed_key_(_cmk)_encryption_on_a_workspace_which_is_prepared_for_encryption.py @@ -0,0 +1,58 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python enable_customer_managed_key_(_cmk)_encryption_on_a_workspace_which_is_prepared_for_encryption.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="subid", + ) + + response = client.workspaces.begin_create_or_update( + resource_group_name="rg", + workspace_name="myWorkspace", + parameters={ + "location": "westus", + "properties": { + "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG", + "parameters": { + "encryption": { + "value": { + "KeyName": "myKeyName", + "keySource": "Microsoft.Keyvault", + "keyvaulturi": "https://myKeyVault.vault.azure.net/", + "keyversion": "00000000000000000000000000000000", + } + }, + "prepareEncryption": {"value": True}, + }, + }, + }, + ).result() + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/EnableEncryption.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_private_endpoint_connection.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_private_endpoint_connection.py new file mode 100644 index 000000000000..ce1860653693 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_private_endpoint_connection.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python get_a_private_endpoint_connection.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + response = client.private_endpoint_connections.get( + resource_group_name="myResourceGroup", + workspace_name="myWorkspace", + private_endpoint_connection_name="myWorkspace.23456789-1111-1111-1111-111111111111", + ) + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/PrivateEndpointConnectionsGet.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_private_link_resource.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_private_link_resource.py new file mode 100644 index 000000000000..b7954b676a80 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_private_link_resource.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential +from azure.mgmt.databricks import AzureDatabricksManagementClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-databricks +# USAGE + python get_a_private_link_resource.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = AzureDatabricksManagementClient( + credential=DefaultAzureCredential(), + subscription_id="11111111-1111-1111-1111-111111111111", + ) + + response = client.private_link_resources.get( + resource_group_name="myResourceGroup", + workspace_name="myWorkspace", + group_id="databricks_ui_api", + ) + print(response) + + +# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/PrivateLinkResourcesGet.json +if __name__ == "__main__": + main() diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace.py new file mode 100644 index 000000000000..d737a1235d00 --- /dev/null +++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace.py
new file mode 100644
index 000000000000..d737a1235d00
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python get_a_workspace.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.workspaces.get(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+    )
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceGet.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_custom_parameters.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_custom_parameters.py
new file mode 100644
index 000000000000..72060572e68c
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_custom_parameters.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python get_a_workspace_with_custom_parameters.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.workspaces.get(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+    )
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceGetParameters.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py
new file mode 100644
index 000000000000..51b58e3ed5db
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python get_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.workspaces.get(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+    )
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceManagedDiskEncryptionGet.json
+if __name__ == "__main__":
+    main()
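The `get` samples print the whole response, but the returned object is a typed `Workspace` model rather than raw JSON. A sketch of reading individual fields, assuming the usual generated-model surface (snake_case attributes mirroring the REST properties, plus an `as_dict()` serializer):

# Sketch: reading typed attributes off the returned model (attribute names
# assume the usual snake_case mapping of the REST properties).
workspace = client.workspaces.get(
    resource_group_name="rg",
    workspace_name="myWorkspace",
)
print(workspace.name, workspace.location)
print(workspace.managed_resource_group_id)
print(workspace.as_dict())  # plain-dict view of the same data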
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_v_net_peering_configured.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_v_net_peering_configured.py
new file mode 100644
index 000000000000..18d3349095c3
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_a_workspace_with_v_net_peering_configured.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python get_a_workspace_with_v_net_peering_configured.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.vnet_peering.get(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+        peering_name="vNetPeering",
+    )
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceVirtualNetPeeringGet.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/get_an_azure_databricks_access_connector.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_an_azure_databricks_access_connector.py
new file mode 100644
index 000000000000..d1fdd2b6195f
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/get_an_azure_databricks_access_connector.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python get_an_azure_databricks_access_connector.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.access_connectors.get(
+        resource_group_name="rg",
+        connector_name="myAccessConnector",
+    )
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/AccessConnectorGet.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/list_all_v_net_peerings_for_the_workspace.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_all_v_net_peerings_for_the_workspace.py
new file mode 100644
index 000000000000..85cd933e0c6d
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_all_v_net_peerings_for_the_workspace.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python list_all_v_net_peerings_for_the_workspace.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.vnet_peering.list_by_workspace(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+    )
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceVirtualNetPeeringList.json
+if __name__ == "__main__":
+    main()
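`list_by_workspace` and the other pageable `list_*` operations in these samples return an `azure.core.paging.ItemPaged` iterator that fetches continuation pages transparently, so iterating item-by-item as the samples do is usually enough. A sketch of explicit page-level iteration via `by_page()`, reusing the client from the sample above:

# Sketch: page-by-page iteration over a pageable result (ItemPaged.by_page()
# is part of azure-core; assumes the client from the sample above).
pages = client.vnet_peering.list_by_workspace(
    resource_group_name="rg",
    workspace_name="myWorkspace",
).by_page()
for page_number, page in enumerate(pages, start=1):
    for peering in page:
        print(page_number, peering.name)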
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/list_outbound_network_dependencies_endpoints_by_workspace.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_outbound_network_dependencies_endpoints_by_workspace.py
new file mode 100644
index 000000000000..e470ebe05122
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_outbound_network_dependencies_endpoints_by_workspace.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python list_outbound_network_dependencies_endpoints_by_workspace.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="11111111-1111-1111-1111-111111111111",
+    )
+
+    response = client.outbound_network_dependencies_endpoints.list(
+        resource_group_name="myResourceGroup",
+        workspace_name="myWorkspace",
+    )
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/OutboundNetworkDependenciesEndpointsList.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_endpoint_connections.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_endpoint_connections.py
new file mode 100644
index 000000000000..0369d8d393b3
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_endpoint_connections.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python list_private_endpoint_connections.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="11111111-1111-1111-1111-111111111111",
+    )
+
+    response = client.private_endpoint_connections.list(
+        resource_group_name="myResourceGroup",
+        workspace_name="myWorkspace",
+    )
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/ListPrivateEndpointConnections.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_link_resources.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_link_resources.py
new file mode 100644
index 000000000000..b445103aea3c
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/list_private_link_resources.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python list_private_link_resources.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="11111111-1111-1111-1111-111111111111",
+    )
+
+    response = client.private_link_resources.list(
+        resource_group_name="myResourceGroup",
+        workspace_name="myWorkspace",
+    )
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/ListPrivateLinkResources.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_all_the_azure_databricks_access_connectors_within_a_subscription..py b/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_all_the_azure_databricks_access_connectors_within_a_subscription..py
new file mode 100644
index 000000000000..b6319a6f7f23
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_all_the_azure_databricks_access_connectors_within_a_subscription..py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python lists_all_the_azure_databricks_access_connectors_within_a_subscription..py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.access_connectors.list_by_subscription()
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/AccessConnectorsListBySubscriptionId.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_azure_databricks_access_connectors_within_a_resource_group.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_azure_databricks_access_connectors_within_a_resource_group.py
new file mode 100644
index 000000000000..30d2699b2e43
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_azure_databricks_access_connectors_within_a_resource_group.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python lists_azure_databricks_access_connectors_within_a_resource_group.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.access_connectors.list_by_resource_group(
+        resource_group_name="rg",
+    )
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/AccessConnectorsListByResourceGroup.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_workspaces.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_workspaces.py
new file mode 100644
index 000000000000..f917b8607408
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/lists_workspaces.py
@@ -0,0 +1,41 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python lists_workspaces.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.workspaces.list_by_resource_group(
+        resource_group_name="rg",
+    )
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspacesListByResourceGroup.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/operations.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/operations.py
new file mode 100644
index 000000000000..b2f9bfc054d5
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/operations.py
@@ -0,0 +1,39 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python operations.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="SUBSCRIPTION_ID",
+    )
+
+    response = client.operations.list()
+    for item in response:
+        print(item)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/OperationsList.json
+if __name__ == "__main__":
+    main()
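None of the generated samples handle failures; management calls raise subclasses of `azure.core.exceptions.HttpResponseError`. A minimal sketch of wrapping one of the earlier `get` calls, assuming the same `client` and resource names as the samples:

# Sketch: minimal error handling around a management call (exception types
# come from azure-core; resource names match the samples above).
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError

try:
    workspace = client.workspaces.get(
        resource_group_name="rg",
        workspace_name="myWorkspace",
    )
except ResourceNotFoundError:
    print("workspace does not exist")
except HttpResponseError as err:
    print("request failed:", err.message)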
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/remove_a_private_endpoint_connection.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/remove_a_private_endpoint_connection.py
new file mode 100644
index 000000000000..d7bde2f974d0
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/remove_a_private_endpoint_connection.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python remove_a_private_endpoint_connection.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="11111111-1111-1111-1111-111111111111",
+    )
+
+    response = client.private_endpoint_connections.begin_delete(
+        resource_group_name="myResourceGroup",
+        workspace_name="myWorkspace",
+        private_endpoint_connection_name="myWorkspace.23456789-1111-1111-1111-111111111111",
+    ).result()
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/PrivateEndpointConnectionsDelete.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/revert_customer_managed_key_(_cmk)_encryption_to_microsoft_managed_keys_encryption_on_a_workspace.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/revert_customer_managed_key_(_cmk)_encryption_to_microsoft_managed_keys_encryption_on_a_workspace.py
new file mode 100644
index 000000000000..4f60ba9de959
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/revert_customer_managed_key_(_cmk)_encryption_to_microsoft_managed_keys_encryption_on_a_workspace.py
@@ -0,0 +1,48 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python revert_customer_managed_key_(_cmk)_encryption_to_microsoft_managed_keys_encryption_on_a_workspace.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.workspaces.begin_create_or_update(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+        parameters={
+            "location": "westus",
+            "properties": {
+                "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG",
+                "parameters": {"encryption": {"value": {"keySource": "Default"}}},
+            },
+        },
+    ).result()
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/DisableEncryption.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_private_endpoint_connection.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_private_endpoint_connection.py
new file mode 100644
index 000000000000..153930e934d5
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_private_endpoint_connection.py
@@ -0,0 +1,50 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python update_a_private_endpoint_connection.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="11111111-1111-1111-1111-111111111111",
+    )
+
+    response = client.private_endpoint_connections.begin_create(
+        resource_group_name="myResourceGroup",
+        workspace_name="myWorkspace",
+        private_endpoint_connection_name="myWorkspace.23456789-1111-1111-1111-111111111111",
+        private_endpoint_connection={
+            "properties": {
+                "privateLinkServiceConnectionState": {
+                    "description": "Approved by databricksadmin@contoso.com",
+                    "status": "Approved",
+                }
+            }
+        },
+    ).result()
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/PrivateEndpointConnectionsUpdate.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_workspace's_tags..py b/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_workspace's_tags..py
new file mode 100644
index 000000000000..e0385ed6d876
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_workspace's_tags..py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python update_a_workspace's_tags..py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.workspaces.begin_update(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+        parameters={"tags": {"mytag1": "myvalue1"}},
+    ).result()
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceUpdate.json
+if __name__ == "__main__":
+    main()
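The update samples pass plain dicts; generated mgmt packages normally also accept the corresponding typed models. A sketch of the same tag update using what is presumably the `WorkspaceUpdate` model from `azure.mgmt.databricks.models` (an assumption based on the usual codegen naming), reusing the client from the sample above:

# Sketch: the tag update expressed with a typed model instead of a raw dict
# (WorkspaceUpdate is assumed to follow the standard generated naming).
from azure.mgmt.databricks import models

update = models.WorkspaceUpdate(tags={"mytag1": "myvalue1"})
workspace = client.workspaces.begin_update(
    resource_group_name="rg",
    workspace_name="myWorkspace",
    parameters=update,
).result()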
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py
new file mode 100644
index 000000000000..9277a1dd92c2
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/update_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py
@@ -0,0 +1,61 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python update_a_workspace_with_customer_managed_key_(_cmk)_encryption_for_managed_disks.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.workspaces.begin_create_or_update(
+        resource_group_name="rg",
+        workspace_name="myWorkspace",
+        parameters={
+            "location": "westus",
+            "properties": {
+                "encryption": {
+                    "entities": {
+                        "managedDisk": {
+                            "keySource": "Microsoft.Keyvault",
+                            "keyVaultProperties": {
+                                "keyName": "test-cmk-key",
+                                "keyVaultUri": "https://test-vault-name.vault.azure.net/",
+                                "keyVersion": "00000000000000000000000000000000",
+                            },
+                            "rotationToLatestKeyVersionEnabled": True,
+                        }
+                    }
+                },
+                "managedResourceGroupId": "/subscriptions/subid/resourceGroups/myManagedRG",
+            },
+            "tags": {"mytag1": "myvalue1"},
+        },
+    ).result()
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/WorkspaceManagedDiskEncryptionUpdate.json
+if __name__ == "__main__":
+    main()
diff --git a/sdk/databricks/azure-mgmt-databricks/generated_samples/update_an_azure_databricks_access_connector.py b/sdk/databricks/azure-mgmt-databricks/generated_samples/update_an_azure_databricks_access_connector.py
new file mode 100644
index 000000000000..936e6a8f60eb
--- /dev/null
+++ b/sdk/databricks/azure-mgmt-databricks/generated_samples/update_an_azure_databricks_access_connector.py
@@ -0,0 +1,42 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from azure.identity import DefaultAzureCredential
+from azure.mgmt.databricks import AzureDatabricksManagementClient
+
+"""
+# PREREQUISITES
+    pip install azure-identity
+    pip install azure-mgmt-databricks
+# USAGE
+    python update_an_azure_databricks_access_connector.py
+
+    Before running the sample, please set the values of the client ID, tenant ID and client secret
+    of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
+    AZURE_CLIENT_SECRET. For more info about how to get the values, please see:
+    https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
+"""
+
+
+def main():
+    client = AzureDatabricksManagementClient(
+        credential=DefaultAzureCredential(),
+        subscription_id="subid",
+    )
+
+    response = client.access_connectors.begin_update(
+        resource_group_name="rg",
+        connector_name="myAccessConnector",
+        parameters={"tags": {"key1": "value1"}},
+    ).result()
+    print(response)
+
+
+# x-ms-original-file: specification/databricks/resource-manager/Microsoft.Databricks/preview/2022-04-01-preview/examples/AccessConnectorPatchUpdate.json
+if __name__ == "__main__":
+    main()
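Every sample here uses the synchronous client. A sketch of the same access-connector update through the async variant, assuming the `aio` subpackages that azure-identity and generated mgmt SDKs normally ship:

# Sketch: async equivalent of the update above (assumes the standard aio
# subpackages; begin_update returns an AsyncLROPoller when awaited).
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.databricks.aio import AzureDatabricksManagementClient


async def update_connector():
    async with DefaultAzureCredential() as credential:
        async with AzureDatabricksManagementClient(credential, "subid") as client:
            poller = await client.access_connectors.begin_update(
                resource_group_name="rg",
                connector_name="myAccessConnector",
                parameters={"tags": {"key1": "value1"}},
            )
            print(await poller.result())


asyncio.run(update_connector())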