From f846a345ae9084d97e00d2f9b164e6bcf09ad206 Mon Sep 17 00:00:00 2001
From: Laurent Mazuel
Date: Tue, 18 Feb 2020 13:32:20 -0800
Subject: [PATCH 1/3] Initial generation Synapse autorest v5

---
 sdk/synapse/azure-synapse/CHANGELOG.md        |    5 +
 sdk/synapse/azure-synapse/MANIFEST.in         |    4 +
 sdk/synapse/azure-synapse/README.md           |   21 +
 sdk/synapse/azure-synapse/azure/__init__.py   |    1 +
 .../azure-synapse/azure/synapse/__init__.py   |   13 +
 .../azure/synapse/_configuration.py           |   68 +
 .../azure/synapse/_synapse_client.py          |   77 +
 .../azure-synapse/azure/synapse/_version.py   |    9 +
 .../azure/synapse/aio/__init__.py             |   10 +
 .../azure/synapse/aio/_configuration_async.py |   66 +
 .../synapse/aio/_synapse_client_async.py      |   73 +
 .../synapse/aio/operations_async/__init__.py  |   19 +
 .../_monitoring_operations_async.py           |  413 ++++
 .../_spark_batch_operations_async.py          |  295 +++
 .../_spark_session_operations_async.py        |  596 ++++++
 .../_workspace_acl_operations_async.py        |  159 ++
 .../azure/synapse/models/__init__.py          |  142 ++
 .../azure/synapse/models/_models.py           | 1468 ++++++++++++++
 .../azure/synapse/models/_models_py3.py       | 1714 +++++++++++++++++
 .../synapse/models/_synapse_client_enums.py   |   44 +
 .../azure/synapse/operations/__init__.py      |   19 +
 .../operations/_monitoring_operations.py      |  419 ++++
 .../operations/_spark_batch_operations.py     |  299 +++
 .../operations/_spark_session_operations.py   |  605 ++++++
 .../operations/_workspace_acl_operations.py   |  161 ++
 sdk/synapse/azure-synapse/sdk_packaging.toml  |    9 +
 sdk/synapse/azure-synapse/setup.cfg           |    2 +
 sdk/synapse/azure-synapse/setup.py            |   86 +
 28 files changed, 6797 insertions(+)
 create mode 100644 sdk/synapse/azure-synapse/CHANGELOG.md
 create mode 100644 sdk/synapse/azure-synapse/MANIFEST.in
 create mode 100644 sdk/synapse/azure-synapse/README.md
 create mode 100644 sdk/synapse/azure-synapse/azure/__init__.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/__init__.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/_configuration.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/_synapse_client.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/_version.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/aio/__init__.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/aio/_configuration_async.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/aio/_synapse_client_async.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/__init__.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_monitoring_operations_async.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_spark_batch_operations_async.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_spark_session_operations_async.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_workspace_acl_operations_async.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/models/__init__.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/models/_models.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/models/_models_py3.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/models/_synapse_client_enums.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/operations/__init__.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/operations/_monitoring_operations.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/operations/_spark_batch_operations.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/operations/_spark_session_operations.py
 create mode 100644 sdk/synapse/azure-synapse/azure/synapse/operations/_workspace_acl_operations.py
 create mode 100644 sdk/synapse/azure-synapse/sdk_packaging.toml
 create mode 100644 sdk/synapse/azure-synapse/setup.cfg
 create mode 100644 sdk/synapse/azure-synapse/setup.py

diff --git a/sdk/synapse/azure-synapse/CHANGELOG.md b/sdk/synapse/azure-synapse/CHANGELOG.md
new file mode 100644
index 000000000000..578ed6acf479
--- /dev/null
+++ b/sdk/synapse/azure-synapse/CHANGELOG.md
@@ -0,0 +1,5 @@
+# Release History
+
+## 0.1.0 (1970-01-01)
+
+* Initial Release
diff --git a/sdk/synapse/azure-synapse/MANIFEST.in b/sdk/synapse/azure-synapse/MANIFEST.in
new file mode 100644
index 000000000000..cad40fe06b5f
--- /dev/null
+++ b/sdk/synapse/azure-synapse/MANIFEST.in
@@ -0,0 +1,4 @@
+recursive-include tests *.py *.yaml
+include *.md
+include azure/__init__.py
+
diff --git a/sdk/synapse/azure-synapse/README.md b/sdk/synapse/azure-synapse/README.md
new file mode 100644
index 000000000000..a45427594d12
--- /dev/null
+++ b/sdk/synapse/azure-synapse/README.md
@@ -0,0 +1,21 @@
+# Microsoft Azure SDK for Python
+
+This is the Microsoft Azure Synapse Client Library.
+This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8.
+For a more complete view of Azure libraries, see the [GitHub repo](https://github.com/Azure/azure-sdk-for-python/tree/master/sdk).
+
+
+# Usage
+
+For code examples, see [Synapse](https://docs.microsoft.com/python/api/overview/azure/)
+on docs.microsoft.com.
+
+
+# Provide Feedback
+
+If you encounter any bugs or have suggestions, please file an issue in the
+[Issues](https://github.com/Azure/azure-sdk-for-python/issues)
+section of the project.
+
+
+![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-python%2Fazure-synapse%2FREADME.png)
diff --git a/sdk/synapse/azure-synapse/azure/__init__.py b/sdk/synapse/azure-synapse/azure/__init__.py
new file mode 100644
index 000000000000..0260537a02bb
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/__init__.py
@@ -0,0 +1 @@
+__path__ = __import__('pkgutil').extend_path(__path__, __name__)
\ No newline at end of file
diff --git a/sdk/synapse/azure-synapse/azure/synapse/__init__.py b/sdk/synapse/azure-synapse/azure/synapse/__init__.py
new file mode 100644
index 000000000000..315264487391
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/__init__.py
@@ -0,0 +1,13 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._synapse_client import SynapseClient
+from ._version import VERSION
+
+__version__ = VERSION
+__all__ = ['SynapseClient']
diff --git a/sdk/synapse/azure-synapse/azure/synapse/_configuration.py b/sdk/synapse/azure-synapse/azure/synapse/_configuration.py
new file mode 100644
index 000000000000..d634d3d07d6c
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/_configuration.py
@@ -0,0 +1,68 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+from ._version import VERSION
+
+
+class SynapseClientConfiguration(Configuration):
+    """Configuration for SynapseClient.
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: azure.core.credentials.TokenCredential
+    :param synapse_dns_suffix: The DNS suffix used as the base for all Synapse service requests.
+    :type synapse_dns_suffix: str
+    :param livy_api_version: Valid api-version for the request.
+    :type livy_api_version: str
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        synapse_dns_suffix,  # type: str
+        livy_api_version="2019-11-01-preview",  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+        if synapse_dns_suffix is None:
+            raise ValueError("Parameter 'synapse_dns_suffix' must not be None.")
+        if livy_api_version is None:
+            raise ValueError("Parameter 'livy_api_version' must not be None.")
+        super(SynapseClientConfiguration, self).__init__(**kwargs)
+
+        self.credential = credential
+        self.synapse_dns_suffix = synapse_dns_suffix
+        self.livy_api_version = livy_api_version
+        self.api_version = "2019-11-01-preview"
+        self.credential_scopes = ['https://dev.azuresynapse.net/.default']
+        self._configure(**kwargs)
+        self.user_agent_policy.add_user_agent('azsdk-python-synapseclient/{}'.format(VERSION))
+
+    def _configure(
+        self,
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/sdk/synapse/azure-synapse/azure/synapse/_synapse_client.py b/sdk/synapse/azure-synapse/azure/synapse/_synapse_client.py
new file mode 100644
index 000000000000..714ac5bd6234
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/_synapse_client.py
@@ -0,0 +1,77 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
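+#
+# Example (illustrative sketch, not generated code): constructing the
+# synchronous client. The workspace name, spark pool name and DNS suffix below
+# are placeholder values, and DefaultAzureCredential is assumed to come from
+# the separate azure-identity package:
+#
+#     from azure.identity import DefaultAzureCredential
+#     from azure.synapse import SynapseClient
+#
+#     client = SynapseClient(
+#         credential=DefaultAzureCredential(),
+#         synapse_dns_suffix="dev.azuresynapse.net",
+#     )
+#     batches = client.spark_batch.list("myworkspace", "mysparkpool")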
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional
+
+from azure.core import PipelineClient
+from msrest import Deserializer, Serializer
+
+from ._configuration import SynapseClientConfiguration
+from .operations import MonitoringOperations
+from .operations import SparkBatchOperations
+from .operations import SparkSessionOperations
+from .operations import WorkspaceAclOperations
+from . import models
+
+
+class SynapseClient(object):
+    """
+
+    :ivar monitoring: MonitoringOperations operations
+    :vartype monitoring: azure.synapse.operations.MonitoringOperations
+    :ivar spark_batch: SparkBatchOperations operations
+    :vartype spark_batch: azure.synapse.operations.SparkBatchOperations
+    :ivar spark_session: SparkSessionOperations operations
+    :vartype spark_session: azure.synapse.operations.SparkSessionOperations
+    :ivar workspace_acl: WorkspaceAclOperations operations
+    :vartype workspace_acl: azure.synapse.operations.WorkspaceAclOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: azure.core.credentials.TokenCredential
+    :param synapse_dns_suffix: The DNS suffix used as the base for all Synapse service requests.
+    :type synapse_dns_suffix: str
+    :param livy_api_version: Valid api-version for the request.
+    :type livy_api_version: str
+    """
+
+    def __init__(
+        self,
+        credential,  # type: "TokenCredential"
+        synapse_dns_suffix,  # type: str
+        livy_api_version="2019-11-01-preview",  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        base_url = 'https://{workspaceName}.{SynapseDnsSuffix}'
+        self._config = SynapseClientConfiguration(credential, synapse_dns_suffix, livy_api_version, **kwargs)
+        self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+
+        self.monitoring = MonitoringOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.spark_batch = SparkBatchOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.spark_session = SparkSessionOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_acl = WorkspaceAclOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+
+    def close(self):
+        # type: () -> None
+        self._client.close()
+
+    def __enter__(self):
+        # type: () -> SynapseClient
+        self._client.__enter__()
+        return self
+
+    def __exit__(self, *exc_details):
+        # type: (Any) -> None
+        self._client.__exit__(*exc_details)
diff --git a/sdk/synapse/azure-synapse/azure/synapse/_version.py b/sdk/synapse/azure-synapse/azure/synapse/_version.py
new file mode 100644
index 000000000000..eae7c95b6fbd
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/_version.py
@@ -0,0 +1,9 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+VERSION = "0.1.0"
diff --git a/sdk/synapse/azure-synapse/azure/synapse/aio/__init__.py b/sdk/synapse/azure-synapse/azure/synapse/aio/__init__.py
new file mode 100644
index 000000000000..626cce6727c8
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/aio/__init__.py
@@ -0,0 +1,10 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._synapse_client_async import SynapseClient
+__all__ = ['SynapseClient']
diff --git a/sdk/synapse/azure-synapse/azure/synapse/aio/_configuration_async.py b/sdk/synapse/azure-synapse/azure/synapse/aio/_configuration_async.py
new file mode 100644
index 000000000000..2e9643ab5a71
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/aio/_configuration_async.py
@@ -0,0 +1,66 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional
+
+from azure.core.configuration import Configuration
+from azure.core.pipeline import policies
+
+from .._version import VERSION
+
+
+class SynapseClientConfiguration(Configuration):
+    """Configuration for SynapseClient.
+    Note that all parameters used to create this instance are saved as instance
+    attributes.
+
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: azure.core.credentials.TokenCredential
+    :param synapse_dns_suffix: The DNS suffix used as the base for all Synapse service requests.
+    :type synapse_dns_suffix: str
+    :param livy_api_version: Valid api-version for the request.
+    :type livy_api_version: str
+    """
+
+    def __init__(
+        self,
+        credential: "TokenCredential",
+        synapse_dns_suffix: str,
+        livy_api_version: Optional[str] = "2019-11-01-preview",
+        **kwargs: Any
+    ) -> None:
+        if credential is None:
+            raise ValueError("Parameter 'credential' must not be None.")
+        if synapse_dns_suffix is None:
+            raise ValueError("Parameter 'synapse_dns_suffix' must not be None.")
+        if livy_api_version is None:
+            raise ValueError("Parameter 'livy_api_version' must not be None.")
+        super(SynapseClientConfiguration, self).__init__(**kwargs)
+
+        self.credential = credential
+        self.synapse_dns_suffix = synapse_dns_suffix
+        self.livy_api_version = livy_api_version
+        self.api_version = "2019-11-01-preview"
+        self.credential_scopes = ['https://dev.azuresynapse.net/.default']
+        self._configure(**kwargs)
+        self.user_agent_policy.add_user_agent('azsdk-python-synapseclient/{}'.format(VERSION))
+
+    def _configure(
+        self,
+        **kwargs: Any
+    ) -> None:
+        self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
+        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
+        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
+        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
+        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
+        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
+        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
+        self.authentication_policy = kwargs.get('authentication_policy')
+        if self.credential and not self.authentication_policy:
+            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
diff --git a/sdk/synapse/azure-synapse/azure/synapse/aio/_synapse_client_async.py b/sdk/synapse/azure-synapse/azure/synapse/aio/_synapse_client_async.py
new file mode 100644
index 000000000000..22543e9820c9
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/aio/_synapse_client_async.py
@@ -0,0 +1,73 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from typing import Any, Optional
+
+from azure.core import AsyncPipelineClient
+from msrest import Deserializer, Serializer
+
+from ._configuration_async import SynapseClientConfiguration
+from .operations_async import MonitoringOperations
+from .operations_async import SparkBatchOperations
+from .operations_async import SparkSessionOperations
+from .operations_async import WorkspaceAclOperations
+from .. import models
+
+
+class SynapseClient(object):
+    """
+
+    :ivar monitoring: MonitoringOperations operations
+    :vartype monitoring: azure.synapse.aio.operations_async.MonitoringOperations
+    :ivar spark_batch: SparkBatchOperations operations
+    :vartype spark_batch: azure.synapse.aio.operations_async.SparkBatchOperations
+    :ivar spark_session: SparkSessionOperations operations
+    :vartype spark_session: azure.synapse.aio.operations_async.SparkSessionOperations
+    :ivar workspace_acl: WorkspaceAclOperations operations
+    :vartype workspace_acl: azure.synapse.aio.operations_async.WorkspaceAclOperations
+    :param credential: Credential needed for the client to connect to Azure.
+    :type credential: azure.core.credentials.TokenCredential
+    :param synapse_dns_suffix: The DNS suffix used as the base for all Synapse service requests.
+    :type synapse_dns_suffix: str
+    :param livy_api_version: Valid api-version for the request.
+    :type livy_api_version: str
+    """
+
+    def __init__(
+        self,
+        credential: "TokenCredential",
+        synapse_dns_suffix: str,
+        livy_api_version: Optional[str] = "2019-11-01-preview",
+        **kwargs: Any
+    ) -> None:
+        base_url = 'https://{workspaceName}.{SynapseDnsSuffix}'
+        self._config = SynapseClientConfiguration(credential, synapse_dns_suffix, livy_api_version, **kwargs)
+        self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs)
+
+        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
+        self._serialize = Serializer(client_models)
+        self._deserialize = Deserializer(client_models)
+
+        self.monitoring = MonitoringOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.spark_batch = SparkBatchOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.spark_session = SparkSessionOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+        self.workspace_acl = WorkspaceAclOperations(
+            self._client, self._config, self._serialize, self._deserialize)
+
+    async def close(self) -> None:
+        await self._client.close()
+
+    async def __aenter__(self) -> "SynapseClient":
+        await self._client.__aenter__()
+        return self
+
+    async def __aexit__(self, *exc_details) -> None:
+        await self._client.__aexit__(*exc_details)
diff --git a/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/__init__.py b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/__init__.py
new file mode 100644
index 000000000000..b04530552597
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
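+#
+# Example (illustrative sketch, not generated code): the async client exposed
+# from azure.synapse.aio mirrors the synchronous surface. The workspace name
+# and DNS suffix are placeholders, and the aio variant of
+# DefaultAzureCredential is assumed to come from the azure-identity package:
+#
+#     import asyncio
+#     from azure.identity.aio import DefaultAzureCredential
+#     from azure.synapse.aio import SynapseClient
+#
+#     async def main():
+#         async with SynapseClient(DefaultAzureCredential(), "dev.azuresynapse.net") as client:
+#             jobs = await client.monitoring.get_spark_job_list("myworkspace")
+#
+#     asyncio.run(main())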
+# --------------------------------------------------------------------------
+
+from ._monitoring_operations_async import MonitoringOperations
+from ._spark_batch_operations_async import SparkBatchOperations
+from ._spark_session_operations_async import SparkSessionOperations
+from ._workspace_acl_operations_async import WorkspaceAclOperations
+
+__all__ = [
+    'MonitoringOperations',
+    'SparkBatchOperations',
+    'SparkSessionOperations',
+    'WorkspaceAclOperations',
+]
diff --git a/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_monitoring_operations_async.py b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_monitoring_operations_async.py
new file mode 100644
index 000000000000..817084f8c442
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_monitoring_operations_async.py
@@ -0,0 +1,413 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import HttpResponseError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class MonitoringOperations:
+    """MonitoringOperations async operations.
+
+    You should not instantiate this class directly. Instead, create a client
+    instance that will create it for you and attach it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.synapse.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def get_history_server_data(
+        self,
+        workspace_name: str,
+        pool_name: str,
+        livy_id: str,
+        app_id: str,
+        attempt_id: str,
+        **kwargs
+    ) -> "models.HistoryServerDataResponse":
+        """Get History Server Data for a given workspace, pool, livy id, app id and attempt id.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param pool_name: The spark pool name.
+        :type pool_name: str
+        :param livy_id: The livy id.
+        :type livy_id: str
+        :param app_id: The application id.
+        :type app_id: str
+        :param attempt_id: The attempt id.
+        :type attempt_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HistoryServerDataResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.HistoryServerDataResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.HistoryServerDataResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_history_server_data.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'poolName': self._serialize.url("pool_name", pool_name, 'str'),
+            'livyId': self._serialize.url("livy_id", livy_id, 'str'),
+            'appId': self._serialize.url("app_id", app_id, 'str'),
+            'attemptId': self._serialize.url("attempt_id", attempt_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('HistoryServerDataResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_history_server_data.metadata = {'url': '/monitoring/workloadTypes/spark/pools/{poolName}/livyIds/{livyId}/applications/{appId}/attemptIds/{attemptId}/historyServerData'}
+
+    async def get_spark_job_list(
+        self,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.SparkJobListViewResponse":
+        """Get list of spark applications for the workspace.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: SparkJobListViewResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.SparkJobListViewResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.SparkJobListViewResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_spark_job_list.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('SparkJobListViewResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_spark_job_list.metadata = {'url': '/monitoring/workloadTypes/spark/Applications'}
+
+    async def get_application_details(
+        self,
+        workspace_name: str,
+        pool_name: str,
+        livy_id: str,
+        **kwargs
+    ) -> "models.SparkJobListViewResponse":
+        """Get the details of one spark application, given the workspace name, pool name and livy id.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param pool_name: The spark pool name.
+        :type pool_name: str
+        :param livy_id: The livy id.
+        :type livy_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: SparkJobListViewResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.SparkJobListViewResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.SparkJobListViewResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_application_details.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'poolName': self._serialize.url("pool_name", pool_name, 'str'),
+            'livyId': self._serialize.url("livy_id", livy_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('SparkJobListViewResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_application_details.metadata = {'url': '/monitoring/workloadTypes/spark/pools/{poolName}/livyIds/{livyId}'}
+
+    async def get_history_server_properties(
+        self,
+        workspace_name: str,
+        **kwargs
+    ) -> "models.HistoryServerPropertiesResponse":
+        """Get History server properties.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HistoryServerPropertiesResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.HistoryServerPropertiesResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.HistoryServerPropertiesResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_history_server_properties.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('HistoryServerPropertiesResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_history_server_properties.metadata = {'url': '/monitoring/workloadTypes/spark/historyServerProperties'}
+
+    async def get_history_server_diagnostic(
+        self,
+        workspace_name: str,
+        pool_name: str,
+        livy_id: str,
+        app_id: str,
+        attempt_id: str,
+        **kwargs
+    ) -> "models.HistoryServerDiagnosticResponse":
+        """Get History Server Diagnostic Data for a given workspace, pool, livy id, app id and attempt id.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param pool_name: The spark pool name.
+        :type pool_name: str
+        :param livy_id: The livy id.
+        :type livy_id: str
+        :param app_id: The application id.
+        :type app_id: str
+        :param attempt_id: The attempt id.
+        :type attempt_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HistoryServerDiagnosticResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.HistoryServerDiagnosticResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.HistoryServerDiagnosticResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_history_server_diagnostic.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'poolName': self._serialize.url("pool_name", pool_name, 'str'),
+            'livyId': self._serialize.url("livy_id", livy_id, 'str'),
+            'appId': self._serialize.url("app_id", app_id, 'str'),
+            'attemptId': self._serialize.url("attempt_id", attempt_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('HistoryServerDiagnosticResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_history_server_diagnostic.metadata = {'url': '/monitoring/workloadTypes/spark/pools/{poolName}/livyIds/{livyId}/applications/{appId}/attemptIds/{attemptId}/historyServerDiagnostic'}
+
+    async def get_history_server_graph(
+        self,
+        workspace_name: str,
+        pool_name: str,
+        livy_id: str,
+        app_id: str,
+        attempt_id: str,
+        **kwargs
+    ) -> "models.HistoryServerGraphResponse":
+        """Get History Server Graph Data for a given workspace, pool, livy id, app id and attempt id.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param pool_name: The spark pool name.
+        :type pool_name: str
+        :param livy_id: The livy id.
+        :type livy_id: str
+        :param app_id: The application id.
+        :type app_id: str
+        :param attempt_id: The attempt id.
+        :type attempt_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HistoryServerGraphResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.HistoryServerGraphResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.HistoryServerGraphResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_history_server_graph.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'poolName': self._serialize.url("pool_name", pool_name, 'str'),
+            'livyId': self._serialize.url("livy_id", livy_id, 'str'),
+            'appId': self._serialize.url("app_id", app_id, 'str'),
+            'attemptId': self._serialize.url("attempt_id", attempt_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('HistoryServerGraphResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_history_server_graph.metadata = {'url': '/monitoring/workloadTypes/spark/pools/{poolName}/livyIds/{livyId}/applications/{appId}/attemptIds/{attemptId}/historyServerGraph'}
diff --git a/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_spark_batch_operations_async.py b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_spark_batch_operations_async.py
new file mode 100644
index 000000000000..37b005d57e86
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_spark_batch_operations_async.py
@@ -0,0 +1,295 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import HttpResponseError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class SparkBatchOperations:
+    """SparkBatchOperations async operations.
+
+    You should not instantiate this class directly. Instead, create a client
+    instance that will create it for you and attach it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.synapse.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer) -> None:
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    async def list(
+        self,
+        workspace_name: str,
+        spark_pool_name: str,
+        from_parameter: Optional[int] = None,
+        size: Optional[int] = None,
+        detailed: Optional[bool] = None,
+        **kwargs
+    ) -> "models.ExtendedLivyListBatchResponse":
+        """List all spark batch jobs which are running under a particular spark pool.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param from_parameter: Optional param specifying which index the list should begin from.
+        :type from_parameter: int
+        :param size: Optional param specifying the size of the returned list.
+            By default it is 20 and that is the maximum.
+        :type size: int
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+            plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivyListBatchResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivyListBatchResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.ExtendedLivyListBatchResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.list.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        if from_parameter is not None:
+            query_parameters['from'] = self._serialize.query("from_parameter", from_parameter, 'int')
+        if size is not None:
+            query_parameters['size'] = self._serialize.query("size", size, 'int')
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivyListBatchResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches'}
+
+    async def create(
+        self,
+        workspace_name: str,
+        spark_pool_name: str,
+        livy_request: "models.ExtendedLivyBatchRequest",
+        detailed: Optional[bool] = None,
+        **kwargs
+    ) -> "models.ExtendedLivyBatchResponse":
+        """Create new spark batch job.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param livy_request: Livy compatible batch job request payload.
+        :type livy_request: ~azure.synapse.models.ExtendedLivyBatchRequest
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+            plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivyBatchResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivyBatchResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.ExtendedLivyBatchResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.create.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json'
+
+        # Construct body
+        body_content = self._serialize.body(livy_request, 'ExtendedLivyBatchRequest')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivyBatchResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches'}
+
+    async def get(
+        self,
+        workspace_name: str,
+        spark_pool_name: str,
+        batch_id: int,
+        detailed: Optional[bool] = None,
+        **kwargs
+    ) -> "models.ExtendedLivyBatchResponse":
+        """Gets a single spark batch job.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param batch_id: Identifier for the batch job.
+        :type batch_id: int
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+            plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivyBatchResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivyBatchResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType["models.ExtendedLivyBatchResponse"] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.get.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'batchId': self._serialize.url("batch_id", batch_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivyBatchResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches/{batchId}'}
+
+    async def delete(
+        self,
+        workspace_name: str,
+        spark_pool_name: str,
+        batch_id: int,
+        **kwargs
+    ) -> None:
+        """Cancels a running spark batch job.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param batch_id: Identifier for the batch job.
+        :type batch_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls: ClsType[None] = kwargs.pop('cls', None)
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.delete.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'batchId': self._serialize.url("batch_id", batch_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters: Dict[str, Any] = {}
+
+        # Construct headers
+        header_parameters: Dict[str, Any] = {}
+
+        # Construct and send request
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches/{batchId}'}
diff --git a/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_spark_session_operations_async.py b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_spark_session_operations_async.py
new file mode 100644
index 000000000000..f85611e15e96
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_spark_session_operations_async.py
@@ -0,0 +1,596 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import HttpResponseError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class SparkSessionOperations:
+    """SparkSessionOperations async operations.
+
+    You should not instantiate this class directly. Instead, create a client
+    instance that will create it for you and attach it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.synapse.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
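+
+    Example (illustrative sketch): the operation group is reached through a
+    client instance. Workspace and pool names are placeholders, ``request`` is
+    a pre-built ~azure.synapse.models.ExtendedLivySessionRequest, and the
+    numeric ``id`` attribute on the response is an assumption based on the
+    Livy payload:
+
+    .. code-block:: python
+
+        session = await client.spark_session.create("myworkspace", "mysparkpool", request)
+        session = await client.spark_session.get("myworkspace", "mysparkpool", session.id)
+        await client.spark_session.delete("myworkspace", "mysparkpool", session.id)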
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + async def list( + self, + workspace_name: str, + spark_pool_name: str, + from_parameter: Optional[int] = None, + size: Optional[int] = None, + detailed: Optional[bool] = None, + **kwargs + ) -> "models.ExtendedLivyListSessionResponse": + """List all spark sessions which are running under a particular spark pool. + + :param workspace_name: The name of the workspace to execute operations on. + :type workspace_name: str + :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool. + :type spark_pool_name: str + :param from_parameter: Optional param specifying which index the list should begin from. + :type from_parameter: int + :param size: Optional param specifying the size of the returned list. + By default it is 20 and that is the maximum. + :type size: int + :param detailed: Optional query param specifying whether detailed response is returned beyond + plain livy. + :type detailed: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExtendedLivyListSessionResponse or the result of cls(response) + :rtype: ~azure.synapse.models.ExtendedLivyListSessionResponse + :raises: ~azure.core.HttpResponseError + """ + cls: ClsType["models.ExtendedLivyListSessionResponse"] = kwargs.pop('cls', None ) + error_map = kwargs.pop('error_map', {}) + + # Construct URL + url = self.list.metadata['url'] + path_format_arguments = { + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True), + 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True), + 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True), + 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters: Dict[str, Any] = {} + if from_parameter is not None: + query_parameters['from'] = self._serialize.query("from_parameter", from_parameter, 'int') + if size is not None: + query_parameters['size'] = self._serialize.query("size", size, 'int') + if detailed is not None: + query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool') + + # Construct headers + header_parameters: Dict[str, Any] = {} + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('ExtendedLivyListSessionResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions'} + + async def create( + self, + workspace_name: str, + spark_pool_name: str, + livy_request: "models.ExtendedLivySessionRequest", + detailed: Optional[bool] = None, + **kwargs + ) -> 
"models.ExtendedLivySessionResponse": + """Create new spark session. + + :param workspace_name: The name of the workspace to execute operations on. + :type workspace_name: str + :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool. + :type spark_pool_name: str + :param livy_request: Livy compatible batch job request payload. + :type livy_request: ~azure.synapse.models.ExtendedLivySessionRequest + :param detailed: Optional query param specifying whether detailed response is returned beyond + plain livy. + :type detailed: bool + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ExtendedLivySessionResponse or the result of cls(response) + :rtype: ~azure.synapse.models.ExtendedLivySessionResponse + :raises: ~azure.core.HttpResponseError + """ + cls: ClsType["models.ExtendedLivySessionResponse"] = kwargs.pop('cls', None ) + error_map = kwargs.pop('error_map', {}) + + # Construct URL + url = self.create.metadata['url'] + path_format_arguments = { + 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True), + 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True), + 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True), + 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters: Dict[str, Any] = {} + if detailed is not None: + query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool') + + # Construct headers + header_parameters: Dict[str, Any] = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json' + + # Construct body + body_content = self._serialize.body(livy_request, 'ExtendedLivySessionRequest') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response) + + deserialized = self._deserialize('ExtendedLivySessionResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions'} + + async def get( + self, + workspace_name: str, + spark_pool_name: str, + session_id: int, + detailed: Optional[bool] = None, + **kwargs + ) -> "models.ExtendedLivySessionResponse": + """Gets a single spark session. + + :param workspace_name: The name of the workspace to execute operations on. + :type workspace_name: str + :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool. + :type spark_pool_name: str + :param session_id: Identifier for the session. + :type session_id: int + :param detailed: Optional query param specifying whether detailed response is returned beyond + plain livy. 
+
+ async def get(
+ self,
+ workspace_name: str,
+ spark_pool_name: str,
+ session_id: int,
+ detailed: Optional[bool] = None,
+ **kwargs
+ ) -> "models.ExtendedLivySessionResponse":
+ """Gets a single spark session.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+ :type spark_pool_name: str
+ :param session_id: Identifier for the session.
+ :type session_id: int
+ :param detailed: Optional query parameter specifying whether a detailed response is returned beyond the plain livy response.
+ :type detailed: bool
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: ExtendedLivySessionResponse or the result of cls(response)
+ :rtype: ~azure.synapse.models.ExtendedLivySessionResponse
+ :raises: ~azure.core.HttpResponseError
+ """
+ cls: ClsType["models.ExtendedLivySessionResponse"] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ # Construct URL
+ url = self.get.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+ 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+ 'sessionId': self._serialize.url("session_id", session_id, 'int'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+ if detailed is not None:
+ query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+ header_parameters['Accept'] = 'application/json'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ deserialized = self._deserialize('ExtendedLivySessionResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}'}
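+
+ # Editor's note, a usage sketch rather than generated code: polling ``get``
+ # until the session has finished starting. The state names follow the Livy
+ # API and are assumptions here, as are ``client`` and ``session_id``.
+ #
+ #     import asyncio
+ #     session = await client.spark_session.get("myworkspace", "mypool", session_id)
+ #     while session.state in ("not_started", "starting"):
+ #         await asyncio.sleep(5)
+ #         session = await client.spark_session.get("myworkspace", "mypool", session_id)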
+
+ async def delete(
+ self,
+ workspace_name: str,
+ spark_pool_name: str,
+ session_id: int,
+ **kwargs
+ ) -> None:
+ """Cancels a running spark session.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+ :type spark_pool_name: str
+ :param session_id: Identifier for the session.
+ :type session_id: int
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.HttpResponseError
+ """
+ cls: ClsType[None] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ # Construct URL
+ url = self.delete.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+ 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+ 'sessionId': self._serialize.url("session_id", session_id, 'int'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+
+ # Construct and send request
+ request = self._client.delete(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ delete.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}'}
+
+ async def reset_timeout(
+ self,
+ workspace_name: str,
+ spark_pool_name: str,
+ session_id: int,
+ **kwargs
+ ) -> None:
+ """Sends a keep-alive call to the current session to reset the session timeout.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+ :type spark_pool_name: str
+ :param session_id: Identifier for the session.
+ :type session_id: int
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: None or the result of cls(response)
+ :rtype: None
+ :raises: ~azure.core.HttpResponseError
+ """
+ cls: ClsType[None] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ # Construct URL
+ url = self.reset_timeout.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+ 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+ 'sessionId': self._serialize.url("session_id", session_id, 'int'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+
+ # Construct and send request
+ request = self._client.put(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ if cls:
+ return cls(pipeline_response, None, {})
+
+ reset_timeout.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/reset-timeout'}
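+
+ # Editor's note, a usage sketch rather than generated code: a periodic
+ # keep-alive loop so a long-lived interactive session does not time out.
+ # The interval, the ``client`` wiring and ``session_in_use`` are assumptions.
+ #
+ #     import asyncio
+ #     while session_in_use:
+ #         await client.spark_session.reset_timeout("myworkspace", "mypool", session_id)
+ #         await asyncio.sleep(60)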
+
+ async def list_statements(
+ self,
+ workspace_name: str,
+ spark_pool_name: str,
+ session_id: int,
+ **kwargs
+ ) -> "models.LivyStatementsResponseBody":
+ """Gets a list of statements within a spark session.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+ :type spark_pool_name: str
+ :param session_id: Identifier for the session.
+ :type session_id: int
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: LivyStatementsResponseBody or the result of cls(response)
+ :rtype: ~azure.synapse.models.LivyStatementsResponseBody
+ :raises: ~azure.core.HttpResponseError
+ """
+ cls: ClsType["models.LivyStatementsResponseBody"] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ # Construct URL
+ url = self.list_statements.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+ 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+ 'sessionId': self._serialize.url("session_id", session_id, 'int'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+ header_parameters['Accept'] = 'application/json'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ deserialized = self._deserialize('LivyStatementsResponseBody', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ list_statements.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements'}
+
+ async def create_statement(
+ self,
+ workspace_name: str,
+ spark_pool_name: str,
+ session_id: int,
+ livy_request: "models.LivyStatementRequestBody",
+ **kwargs
+ ) -> "models.LivyStatementResponseBody":
+ """Creates a statement within a spark session.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+ :type spark_pool_name: str
+ :param session_id: Identifier for the session.
+ :type session_id: int
+ :param livy_request: Livy-compatible statement request payload.
+ :type livy_request: ~azure.synapse.models.LivyStatementRequestBody
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: LivyStatementResponseBody or the result of cls(response)
+ :rtype: ~azure.synapse.models.LivyStatementResponseBody
+ :raises: ~azure.core.HttpResponseError
+ """
+ cls: ClsType["models.LivyStatementResponseBody"] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ # Construct URL
+ url = self.create_statement.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+ 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+ 'sessionId': self._serialize.url("session_id", session_id, 'int'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+ header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = 'application/json'
+
+ # Construct body
+ body_content = self._serialize.body(livy_request, 'LivyStatementRequestBody')
+
+ # Construct and send request
+ request = self._client.post(url, query_parameters, header_parameters, body_content)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ deserialized = self._deserialize('LivyStatementResponseBody', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ create_statement.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements'}
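+
+ # Editor's note, a usage sketch rather than generated code: submitting a
+ # statement and fetching it back. ``code`` and ``kind`` follow the Livy
+ # statement API; the ``LivyStatementRequestBody`` field names, the ``id``
+ # attribute on the response, and the ``client`` wiring are assumptions.
+ #
+ #     stmt_request = models.LivyStatementRequestBody(code="1 + 1", kind="spark")
+ #     stmt = await client.spark_session.create_statement(
+ #         "myworkspace", "mypool", session_id, stmt_request)
+ #     stmt = await client.spark_session.get_statement(
+ #         "myworkspace", "mypool", session_id, stmt.id)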
+
+ async def get_statement(
+ self,
+ workspace_name: str,
+ spark_pool_name: str,
+ session_id: int,
+ statement_id: int,
+ **kwargs
+ ) -> "models.LivyStatementResponseBody":
+ """Gets a single statement within a spark session.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+ :type spark_pool_name: str
+ :param session_id: Identifier for the session.
+ :type session_id: int
+ :param statement_id: Identifier for the statement.
+ :type statement_id: int
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: LivyStatementResponseBody or the result of cls(response)
+ :rtype: ~azure.synapse.models.LivyStatementResponseBody
+ :raises: ~azure.core.HttpResponseError
+ """
+ cls: ClsType["models.LivyStatementResponseBody"] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ # Construct URL
+ url = self.get_statement.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+ 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+ 'sessionId': self._serialize.url("session_id", session_id, 'int'),
+ 'statementId': self._serialize.url("statement_id", statement_id, 'int'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+ header_parameters['Accept'] = 'application/json'
+
+ # Construct and send request
+ request = self._client.get(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ deserialized = self._deserialize('LivyStatementResponseBody', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_statement.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}'}
+
+ async def delete_statement(
+ self,
+ workspace_name: str,
+ spark_pool_name: str,
+ session_id: int,
+ statement_id: int,
+ **kwargs
+ ) -> "models.LivyStatementCancellationResponse":
+ """Cancels a statement within a session.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+ :type spark_pool_name: str
+ :param session_id: Identifier for the session.
+ :type session_id: int
+ :param statement_id: Identifier for the statement.
+ :type statement_id: int
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: LivyStatementCancellationResponse or the result of cls(response)
+ :rtype: ~azure.synapse.models.LivyStatementCancellationResponse
+ :raises: ~azure.core.HttpResponseError
+ """
+ cls: ClsType["models.LivyStatementCancellationResponse"] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ # Construct URL
+ url = self.delete_statement.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ 'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+ 'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+ 'sessionId': self._serialize.url("session_id", session_id, 'int'),
+ 'statementId': self._serialize.url("statement_id", statement_id, 'int'),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+ header_parameters['Accept'] = 'application/json'
+
+ # Construct and send request
+ request = self._client.post(url, query_parameters, header_parameters)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise HttpResponseError(response=response)
+
+ deserialized = self._deserialize('LivyStatementCancellationResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ delete_statement.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}/cancel'}
diff --git a/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_workspace_acl_operations_async.py b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_workspace_acl_operations_async.py
new file mode 100644
index 000000000000..8b34345d4f32
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/aio/operations_async/_workspace_acl_operations_async.py
@@ -0,0 +1,159 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
+
+from ... import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceAclOperations:
+ """WorkspaceAclOperations async operations.
+
+ You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute.
+
+ :ivar models: Alias to model classes used in this operation group.
+ :type models: ~azure.synapse.models
+ :param client: Client for service requests.
+ :param config: Configuration of service client.
+ :param serializer: An object model serializer.
+ :param deserializer: An object model deserializer.
+ """
+
+ models = models
+
+ def __init__(self, client, config, serializer, deserializer) -> None:
+ self._client = client
+ self._serialize = serializer
+ self._deserialize = deserializer
+ self._config = config
+
+ async def get_access_control_info(
+ self,
+ workspace_name: str,
+ artifact_name: Optional[str] = None,
+ **kwargs
+ ) -> "models.WorkspaceAccessControlResponse":
+ """Gets access control info.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param artifact_name:
+ :type artifact_name: str
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceAccessControlResponse or the result of cls(response)
+ :rtype: ~azure.synapse.models.WorkspaceAccessControlResponse
+ :raises: ~azure.synapse.models.ErrorResponseException
+ """
+ cls: ClsType["models.WorkspaceAccessControlResponse"] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ _resource = models.GetAccessControlInfoRequest(artifact_name=artifact_name)
+ api_version = "2019-11-01-preview"
+
+ # Construct URL
+ url = self.get_access_control_info.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+ header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = 'application/json'
+
+ # Construct body
+ body_content = self._serialize.body(_resource, 'GetAccessControlInfoRequest')
+
+ # Construct and send request
+ request = self._client.post(url, query_parameters, header_parameters, body_content)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.ErrorResponseException.from_response(response, self._deserialize)
+
+ deserialized = self._deserialize('WorkspaceAccessControlResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ get_access_control_info.metadata = {'url': '/getAccessControl'}
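+
+ # Editor's note, a usage sketch rather than generated code: reading workspace
+ # ACL info. The ``workspace_acl`` attribute name on the assumed async
+ # ``SynapseClient`` is itself an assumption.
+ #
+ #     acl = await client.workspace_acl.get_access_control_info("myworkspace")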
+
+ async def set_workspace_administrators(
+ self,
+ workspace_name: str,
+ administrators: Optional[List[str]] = None,
+ **kwargs
+ ) -> "models.WorkspaceAccessControlResponse":
+ """Replaces the admins of the workspace.
+
+ :param workspace_name: The name of the workspace to execute operations on.
+ :type workspace_name: str
+ :param administrators:
+ :type administrators: list[str]
+ :keyword callable cls: A custom type or function that will be passed the direct response
+ :return: WorkspaceAccessControlResponse or the result of cls(response)
+ :rtype: ~azure.synapse.models.WorkspaceAccessControlResponse
+ :raises: ~azure.synapse.models.ErrorResponseException
+ """
+ cls: ClsType["models.WorkspaceAccessControlResponse"] = kwargs.pop('cls', None)
+ error_map = kwargs.pop('error_map', {})
+
+ _request = models.SetWorkspaceAdministratorsRequest(administrators=administrators)
+ api_version = "2019-11-01-preview"
+
+ # Construct URL
+ url = self.set_workspace_administrators.metadata['url']
+ path_format_arguments = {
+ 'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+ 'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+ }
+ url = self._client.format_url(url, **path_format_arguments)
+
+ # Construct parameters
+ query_parameters: Dict[str, Any] = {}
+ query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+ # Construct headers
+ header_parameters: Dict[str, Any] = {}
+ header_parameters['Accept'] = 'application/json'
+ header_parameters['Content-Type'] = 'application/json'
+
+ # Construct body
+ body_content = self._serialize.body(_request, 'SetWorkspaceAdministratorsRequest')
+
+ # Construct and send request
+ request = self._client.post(url, query_parameters, header_parameters, body_content)
+ pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
+ response = pipeline_response.http_response
+
+ if response.status_code not in [200]:
+ map_error(status_code=response.status_code, response=response, error_map=error_map)
+ raise models.ErrorResponseException.from_response(response, self._deserialize)
+
+ deserialized = self._deserialize('WorkspaceAccessControlResponse', pipeline_response)
+
+ if cls:
+ return cls(pipeline_response, deserialized, {})
+
+ return deserialized
+ set_workspace_administrators.metadata = {'url': '/setWorkspaceAdmins'}
diff --git a/sdk/synapse/azure-synapse/azure/synapse/models/__init__.py b/sdk/synapse/azure-synapse/azure/synapse/models/__init__.py
new file mode 100644
index 000000000000..89b7cd50b3f7
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/models/__init__.py
@@ -0,0 +1,142 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# -------------------------------------------------------------------------- + +try: + from ._models_py3 import Config + from ._models_py3 import Data + from ._models_py3 import Edge + from ._models_py3 import ErrorDetail + from ._models_py3 import ErrorInformation + from ._models_py3 import ErrorResponse, ErrorResponseException + from ._models_py3 import Executors + from ._models_py3 import ExtendedLivyBatchRequest + from ._models_py3 import ExtendedLivyBatchResponse + from ._models_py3 import ExtendedLivyListBatchResponse + from ._models_py3 import ExtendedLivyListSessionResponse + from ._models_py3 import ExtendedLivySessionRequest + from ._models_py3 import ExtendedLivySessionResponse + from ._models_py3 import GetAccessControlInfoRequest + from ._models_py3 import HistoryServerDataResponse + from ._models_py3 import HistoryServerDiagnosticResponse + from ._models_py3 import HistoryServerDiagnosticResponseData + from ._models_py3 import HistoryServerGraphResponse + from ._models_py3 import HistoryServerGraphResponseData + from ._models_py3 import HistoryServerPropertiesResponse + from ._models_py3 import Jobs + from ._models_py3 import LivyBatchStateInformation + from ._models_py3 import LivyRequestBase + from ._models_py3 import LivySessionStateInformation + from ._models_py3 import LivyStatementCancellationResponse + from ._models_py3 import LivyStatementOutput + from ._models_py3 import LivyStatementOutputData + from ._models_py3 import LivyStatementRequestBody + from ._models_py3 import LivyStatementResponseBody + from ._models_py3 import LivyStatementsResponseBody + from ._models_py3 import Option + from ._models_py3 import SchedulerInformation + from ._models_py3 import SetWorkspaceAdministratorsRequest + from ._models_py3 import SparkJob + from ._models_py3 import SparkJobListViewResponse + from ._models_py3 import SparkServicePluginInformation + from ._models_py3 import Stages + from ._models_py3 import Tables + from ._models_py3 import WorkspaceAccessControlResponse +except (SyntaxError, ImportError): + from ._models import Config # type: ignore + from ._models import Data # type: ignore + from ._models import Edge # type: ignore + from ._models import ErrorDetail # type: ignore + from ._models import ErrorInformation # type: ignore + from ._models import ErrorResponse, ErrorResponseException # type: ignore + from ._models import Executors # type: ignore + from ._models import ExtendedLivyBatchRequest # type: ignore + from ._models import ExtendedLivyBatchResponse # type: ignore + from ._models import ExtendedLivyListBatchResponse # type: ignore + from ._models import ExtendedLivyListSessionResponse # type: ignore + from ._models import ExtendedLivySessionRequest # type: ignore + from ._models import ExtendedLivySessionResponse # type: ignore + from ._models import GetAccessControlInfoRequest # type: ignore + from ._models import HistoryServerDataResponse # type: ignore + from ._models import HistoryServerDiagnosticResponse # type: ignore + from ._models import HistoryServerDiagnosticResponseData # type: ignore + from ._models import HistoryServerGraphResponse # type: ignore + from ._models import HistoryServerGraphResponseData # type: ignore + from ._models import HistoryServerPropertiesResponse # type: ignore + from ._models import Jobs # type: ignore + from ._models import LivyBatchStateInformation # type: ignore + from ._models import LivyRequestBase # type: ignore + from ._models import LivySessionStateInformation # type: ignore + from ._models import 
LivyStatementCancellationResponse # type: ignore + from ._models import LivyStatementOutput # type: ignore + from ._models import LivyStatementOutputData # type: ignore + from ._models import LivyStatementRequestBody # type: ignore + from ._models import LivyStatementResponseBody # type: ignore + from ._models import LivyStatementsResponseBody # type: ignore + from ._models import Option # type: ignore + from ._models import SchedulerInformation # type: ignore + from ._models import SetWorkspaceAdministratorsRequest # type: ignore + from ._models import SparkJob # type: ignore + from ._models import SparkJobListViewResponse # type: ignore + from ._models import SparkServicePluginInformation # type: ignore + from ._models import Stages # type: ignore + from ._models import Tables # type: ignore + from ._models import WorkspaceAccessControlResponse # type: ignore +from ._synapse_client_enums import ( + ErrorSource, + JobResult, + JobType, + PluginCurrentState, + SchedulerCurrentState, +) + +__all__ = [ + 'Config', + 'Data', + 'Edge', + 'ErrorDetail', + 'ErrorInformation', + 'ErrorResponse', 'ErrorResponseException', + 'Executors', + 'ExtendedLivyBatchRequest', + 'ExtendedLivyBatchResponse', + 'ExtendedLivyListBatchResponse', + 'ExtendedLivyListSessionResponse', + 'ExtendedLivySessionRequest', + 'ExtendedLivySessionResponse', + 'GetAccessControlInfoRequest', + 'HistoryServerDataResponse', + 'HistoryServerDiagnosticResponse', + 'HistoryServerDiagnosticResponseData', + 'HistoryServerGraphResponse', + 'HistoryServerGraphResponseData', + 'HistoryServerPropertiesResponse', + 'Jobs', + 'LivyBatchStateInformation', + 'LivyRequestBase', + 'LivySessionStateInformation', + 'LivyStatementCancellationResponse', + 'LivyStatementOutput', + 'LivyStatementOutputData', + 'LivyStatementRequestBody', + 'LivyStatementResponseBody', + 'LivyStatementsResponseBody', + 'Option', + 'SchedulerInformation', + 'SetWorkspaceAdministratorsRequest', + 'SparkJob', + 'SparkJobListViewResponse', + 'SparkServicePluginInformation', + 'Stages', + 'Tables', + 'WorkspaceAccessControlResponse', + 'ErrorSource', + 'JobResult', + 'JobType', + 'PluginCurrentState', + 'SchedulerCurrentState', +] diff --git a/sdk/synapse/azure-synapse/azure/synapse/models/_models.py b/sdk/synapse/azure-synapse/azure/synapse/models/_models.py new file mode 100644 index 000000000000..f6ddce541a67 --- /dev/null +++ b/sdk/synapse/azure-synapse/azure/synapse/models/_models.py @@ -0,0 +1,1468 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.core.exceptions import HttpResponseError +import msrest.serialization + + +class Config(msrest.serialization.Model): + """Config. + + :param is_hive_enabled: + :type is_hive_enabled: bool + """ + + _attribute_map = { + 'is_hive_enabled': {'key': 'isHiveEnabled', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(Config, self).__init__(**kwargs) + self.is_hive_enabled = kwargs.get('is_hive_enabled', None) + + +class Data(msrest.serialization.Model): + """Data. 
+ + :param handler_path: + :type handler_path: str + :param inputs: + :type inputs: list[str] + :param outputs: + :type outputs: list[str] + :param tables: + :type tables: ~azure.synapse.models.Tables + :param config: + :type config: ~azure.synapse.models.Config + """ + + _attribute_map = { + 'handler_path': {'key': 'handlerPath', 'type': 'str'}, + 'inputs': {'key': 'inputs', 'type': '[str]'}, + 'outputs': {'key': 'outputs', 'type': '[str]'}, + 'tables': {'key': 'tables', 'type': 'Tables'}, + 'config': {'key': 'config', 'type': 'Config'}, + } + + def __init__( + self, + **kwargs + ): + super(Data, self).__init__(**kwargs) + self.handler_path = kwargs.get('handler_path', None) + self.inputs = kwargs.get('inputs', None) + self.outputs = kwargs.get('outputs', None) + self.tables = kwargs.get('tables', None) + self.config = kwargs.get('config', None) + + +class Edge(msrest.serialization.Model): + """Edge. + + :param id: + :type id: int + :param child_id: + :type child_id: int + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'int'}, + 'child_id': {'key': 'childId', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(Edge, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.child_id = kwargs.get('child_id', None) + + +class ErrorDetail(msrest.serialization.Model): + """ErrorDetail. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. + :type code: str + :param message: Required. + :type message: str + :param target: + :type target: str + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorDetail, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.message = kwargs.get('message', None) + self.target = kwargs.get('target', None) + + +class ErrorInformation(msrest.serialization.Model): + """ErrorInformation. + + :param message: + :type message: str + :param error_code: + :type error_code: str + :param source: Possible values include: 'System', 'User', 'Unknown', 'Dependency'. + :type source: str or ~azure.synapse.models.ErrorSource + """ + + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'source': {'key': 'source', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ErrorInformation, self).__init__(**kwargs) + self.message = kwargs.get('message', None) + self.error_code = kwargs.get('error_code', None) + self.source = kwargs.get('source', None) + + +class ErrorResponseException(HttpResponseError): + """Server responded with exception of type: 'ErrorResponse'. + + :param response: Server response to be deserialized. + :param error_model: A deserialized model of the response body as model. + """ + + def __init__(self, response, error_model): + self.error = error_model + super(ErrorResponseException, self).__init__(response=response, error_model=error_model) + + @classmethod + def from_response(cls, response, deserialize): + """Deserialize this response as this exception, or a subclass of this exception. + + :param response: Server response to be deserialized. 
+ :param deserialize: A deserializer.
+ """
+ model_name = 'ErrorResponse'
+ error = deserialize(model_name, response)
+ if error is None:
+ error = deserialize.dependencies[model_name]()
+ return error._EXCEPTION_TYPE(response, error)
+
+
+class ErrorResponse(msrest.serialization.Model):
+ """ErrorResponse.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param code: Required.
+ :type code: str
+ :param message: Required.
+ :type message: str
+ :param target:
+ :type target: str
+ :param details:
+ :type details: list[~azure.synapse.models.ErrorDetail]
+ """
+ _EXCEPTION_TYPE = ErrorResponseException
+
+ _validation = {
+ 'code': {'required': True},
+ 'message': {'required': True},
+ }
+
+ _attribute_map = {
+ 'code': {'key': 'code', 'type': 'str'},
+ 'message': {'key': 'message', 'type': 'str'},
+ 'target': {'key': 'target', 'type': 'str'},
+ 'details': {'key': 'details', 'type': '[ErrorDetail]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ErrorResponse, self).__init__(**kwargs)
+ self.code = kwargs.get('code', None)
+ self.message = kwargs.get('message', None)
+ self.target = kwargs.get('target', None)
+ self.details = kwargs.get('details', None)
+
+
+class Executors(msrest.serialization.Model):
+ """Executors.
+
+ :param is_dynamic_allocation_on:
+ :type is_dynamic_allocation_on: bool
+ :param executor_instances:
+ :type executor_instances: int
+ :param start_time:
+ :type start_time: int
+ :param end_time:
+ :type end_time: int
+ :param events:
+ :type events: list[int]
+ :param executor_cores:
+ :type executor_cores: str
+ :param executor_memory:
+ :type executor_memory: str
+ """
+
+ _attribute_map = {
+ 'is_dynamic_allocation_on': {'key': 'isDynamicAllocationOn', 'type': 'bool'},
+ 'executor_instances': {'key': 'executorInstances', 'type': 'int'},
+ 'start_time': {'key': 'startTime', 'type': 'int'},
+ 'end_time': {'key': 'endTime', 'type': 'int'},
+ 'events': {'key': 'events', 'type': '[int]'},
+ 'executor_cores': {'key': 'executorCores', 'type': 'str'},
+ 'executor_memory': {'key': 'executorMemory', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Executors, self).__init__(**kwargs)
+ self.is_dynamic_allocation_on = kwargs.get('is_dynamic_allocation_on', None)
+ self.executor_instances = kwargs.get('executor_instances', None)
+ self.start_time = kwargs.get('start_time', None)
+ self.end_time = kwargs.get('end_time', None)
+ self.events = kwargs.get('events', None)
+ self.executor_cores = kwargs.get('executor_cores', None)
+ self.executor_memory = kwargs.get('executor_memory', None)
+
+
+class ExtendedLivyBatchRequest(msrest.serialization.Model):
+ """ExtendedLivyBatchRequest.
+
+ :param tags: A set of tags. Dictionary of :code:`<string>`.
+ :type tags: dict[str, str]
+ :param artifact_id:
+ :type artifact_id: str
+ :param name:
+ :type name: str
+ :param file:
+ :type file: str
+ :param class_name:
+ :type class_name: str
+ :param args:
+ :type args: list[str]
+ :param jars:
+ :type jars: list[str]
+ :param files:
+ :type files: list[str]
+ :param archives:
+ :type archives: list[str]
+ :param conf: Dictionary of :code:`<string>`.
+ :type conf: dict[str, str]
+ :param driver_memory:
+ :type driver_memory: str
+ :param driver_cores:
+ :type driver_cores: int
+ :param executor_memory:
+ :type executor_memory: str
+ :param executor_cores:
+ :type executor_cores: int
+ :param num_executors:
+ :type num_executors: int
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'artifact_id': {'key': 'artifactId', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'file': {'key': 'file', 'type': 'str'},
+ 'class_name': {'key': 'className', 'type': 'str'},
+ 'args': {'key': 'args', 'type': '[str]'},
+ 'jars': {'key': 'jars', 'type': '[str]'},
+ 'files': {'key': 'files', 'type': '[str]'},
+ 'archives': {'key': 'archives', 'type': '[str]'},
+ 'conf': {'key': 'conf', 'type': '{str}'},
+ 'driver_memory': {'key': 'driverMemory', 'type': 'str'},
+ 'driver_cores': {'key': 'driverCores', 'type': 'int'},
+ 'executor_memory': {'key': 'executorMemory', 'type': 'str'},
+ 'executor_cores': {'key': 'executorCores', 'type': 'int'},
+ 'num_executors': {'key': 'numExecutors', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ExtendedLivyBatchRequest, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+ self.artifact_id = kwargs.get('artifact_id', None)
+ self.name = kwargs.get('name', None)
+ self.file = kwargs.get('file', None)
+ self.class_name = kwargs.get('class_name', None)
+ self.args = kwargs.get('args', None)
+ self.jars = kwargs.get('jars', None)
+ self.files = kwargs.get('files', None)
+ self.archives = kwargs.get('archives', None)
+ self.conf = kwargs.get('conf', None)
+ self.driver_memory = kwargs.get('driver_memory', None)
+ self.driver_cores = kwargs.get('driver_cores', None)
+ self.executor_memory = kwargs.get('executor_memory', None)
+ self.executor_cores = kwargs.get('executor_cores', None)
+ self.num_executors = kwargs.get('num_executors', None)
+
+
+class ExtendedLivyBatchResponse(msrest.serialization.Model):
+ """ExtendedLivyBatchResponse.
+
+ :param livy_info:
+ :type livy_info: ~azure.synapse.models.LivyBatchStateInformation
+ :param name:
+ :type name: str
+ :param workspace_name:
+ :type workspace_name: str
+ :param spark_pool_name:
+ :type spark_pool_name: str
+ :param submitter_name:
+ :type submitter_name: str
+ :param submitter_id:
+ :type submitter_id: str
+ :param artifact_id:
+ :type artifact_id: str
+ :param job_type: Possible values include: 'SparkBatch', 'SparkSession'.
+ :type job_type: str or ~azure.synapse.models.JobType
+ :param result: Possible values include: 'Uncertain', 'Succeeded', 'Failed', 'Cancelled'.
+ :type result: str or ~azure.synapse.models.JobResult
+ :param scheduler_info:
+ :type scheduler_info: ~azure.synapse.models.SchedulerInformation
+ :param plugin_info:
+ :type plugin_info: ~azure.synapse.models.SparkServicePluginInformation
+ :param error_info:
+ :type error_info: list[~azure.synapse.models.ErrorInformation]
+ :param tags: A set of tags. Dictionary of :code:`<string>`.
+ :type tags: dict[str, str]
+ :param id:
+ :type id: int
+ :param app_id:
+ :type app_id: str
+ :param app_info: Dictionary of :code:`<string>`.
+ :type app_info: dict[str, str] + :param state: + :type state: str + :param log: + :type log: list[str] + """ + + _attribute_map = { + 'livy_info': {'key': 'livyInfo', 'type': 'LivyBatchStateInformation'}, + 'name': {'key': 'name', 'type': 'str'}, + 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, + 'spark_pool_name': {'key': 'sparkPoolName', 'type': 'str'}, + 'submitter_name': {'key': 'submitterName', 'type': 'str'}, + 'submitter_id': {'key': 'submitterId', 'type': 'str'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'result': {'key': 'result', 'type': 'str'}, + 'scheduler_info': {'key': 'schedulerInfo', 'type': 'SchedulerInformation'}, + 'plugin_info': {'key': 'pluginInfo', 'type': 'SparkServicePluginInformation'}, + 'error_info': {'key': 'errorInfo', 'type': '[ErrorInformation]'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'id': {'key': 'id', 'type': 'int'}, + 'app_id': {'key': 'appId', 'type': 'str'}, + 'app_info': {'key': 'appInfo', 'type': '{str}'}, + 'state': {'key': 'state', 'type': 'str'}, + 'log': {'key': 'log', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(ExtendedLivyBatchResponse, self).__init__(**kwargs) + self.livy_info = kwargs.get('livy_info', None) + self.name = kwargs.get('name', None) + self.workspace_name = kwargs.get('workspace_name', None) + self.spark_pool_name = kwargs.get('spark_pool_name', None) + self.submitter_name = kwargs.get('submitter_name', None) + self.submitter_id = kwargs.get('submitter_id', None) + self.artifact_id = kwargs.get('artifact_id', None) + self.job_type = kwargs.get('job_type', None) + self.result = kwargs.get('result', None) + self.scheduler_info = kwargs.get('scheduler_info', None) + self.plugin_info = kwargs.get('plugin_info', None) + self.error_info = kwargs.get('error_info', None) + self.tags = kwargs.get('tags', None) + self.id = kwargs.get('id', None) + self.app_id = kwargs.get('app_id', None) + self.app_info = kwargs.get('app_info', None) + self.state = kwargs.get('state', None) + self.log = kwargs.get('log', None) + + +class ExtendedLivyListBatchResponse(msrest.serialization.Model): + """ExtendedLivyListBatchResponse. + + :param from_property: + :type from_property: int + :param total: + :type total: int + :param sessions: + :type sessions: list[~azure.synapse.models.ExtendedLivyBatchResponse] + """ + + _attribute_map = { + 'from_property': {'key': 'from', 'type': 'int'}, + 'total': {'key': 'total', 'type': 'int'}, + 'sessions': {'key': 'sessions', 'type': '[ExtendedLivyBatchResponse]'}, + } + + def __init__( + self, + **kwargs + ): + super(ExtendedLivyListBatchResponse, self).__init__(**kwargs) + self.from_property = kwargs.get('from_property', None) + self.total = kwargs.get('total', None) + self.sessions = kwargs.get('sessions', None) + + +class ExtendedLivyListSessionResponse(msrest.serialization.Model): + """ExtendedLivyListSessionResponse. 
+
+ :param from_property:
+ :type from_property: int
+ :param total:
+ :type total: int
+ :param sessions:
+ :type sessions: list[~azure.synapse.models.ExtendedLivySessionResponse]
+ """
+
+ _attribute_map = {
+ 'from_property': {'key': 'from', 'type': 'int'},
+ 'total': {'key': 'total', 'type': 'int'},
+ 'sessions': {'key': 'sessions', 'type': '[ExtendedLivySessionResponse]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ExtendedLivyListSessionResponse, self).__init__(**kwargs)
+ self.from_property = kwargs.get('from_property', None)
+ self.total = kwargs.get('total', None)
+ self.sessions = kwargs.get('sessions', None)
+
+
+class ExtendedLivySessionRequest(msrest.serialization.Model):
+ """ExtendedLivySessionRequest.
+
+ :param tags: A set of tags. Dictionary of :code:`<string>`.
+ :type tags: dict[str, str]
+ :param artifact_id:
+ :type artifact_id: str
+ :param name:
+ :type name: str
+ :param file:
+ :type file: str
+ :param class_name:
+ :type class_name: str
+ :param args:
+ :type args: list[str]
+ :param jars:
+ :type jars: list[str]
+ :param files:
+ :type files: list[str]
+ :param archives:
+ :type archives: list[str]
+ :param conf: Dictionary of :code:`<string>`.
+ :type conf: dict[str, str]
+ :param driver_memory:
+ :type driver_memory: str
+ :param driver_cores:
+ :type driver_cores: int
+ :param executor_memory:
+ :type executor_memory: str
+ :param executor_cores:
+ :type executor_cores: int
+ :param num_executors:
+ :type num_executors: int
+ """
+
+ _attribute_map = {
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'artifact_id': {'key': 'artifactId', 'type': 'str'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'file': {'key': 'file', 'type': 'str'},
+ 'class_name': {'key': 'className', 'type': 'str'},
+ 'args': {'key': 'args', 'type': '[str]'},
+ 'jars': {'key': 'jars', 'type': '[str]'},
+ 'files': {'key': 'files', 'type': '[str]'},
+ 'archives': {'key': 'archives', 'type': '[str]'},
+ 'conf': {'key': 'conf', 'type': '{str}'},
+ 'driver_memory': {'key': 'driverMemory', 'type': 'str'},
+ 'driver_cores': {'key': 'driverCores', 'type': 'int'},
+ 'executor_memory': {'key': 'executorMemory', 'type': 'str'},
+ 'executor_cores': {'key': 'executorCores', 'type': 'int'},
+ 'num_executors': {'key': 'numExecutors', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ExtendedLivySessionRequest, self).__init__(**kwargs)
+ self.tags = kwargs.get('tags', None)
+ self.artifact_id = kwargs.get('artifact_id', None)
+ self.name = kwargs.get('name', None)
+ self.file = kwargs.get('file', None)
+ self.class_name = kwargs.get('class_name', None)
+ self.args = kwargs.get('args', None)
+ self.jars = kwargs.get('jars', None)
+ self.files = kwargs.get('files', None)
+ self.archives = kwargs.get('archives', None)
+ self.conf = kwargs.get('conf', None)
+ self.driver_memory = kwargs.get('driver_memory', None)
+ self.driver_cores = kwargs.get('driver_cores', None)
+ self.executor_memory = kwargs.get('executor_memory', None)
+ self.executor_cores = kwargs.get('executor_cores', None)
+ self.num_executors = kwargs.get('num_executors', None)
+
+
+class ExtendedLivySessionResponse(msrest.serialization.Model):
+ """ExtendedLivySessionResponse.
+
+ :param livy_info:
+ :type livy_info: ~azure.synapse.models.LivySessionStateInformation
+ :param name:
+ :type name: str
+ :param workspace_name:
+ :type workspace_name: str
+ :param spark_pool_name:
+ :type spark_pool_name: str
+ :param submitter_name:
+ :type submitter_name: str
+ :param submitter_id:
+ :type submitter_id: str
+ :param artifact_id:
+ :type artifact_id: str
+ :param job_type: Possible values include: 'SparkBatch', 'SparkSession'.
+ :type job_type: str or ~azure.synapse.models.JobType
+ :param result: Possible values include: 'Uncertain', 'Succeeded', 'Failed', 'Cancelled'.
+ :type result: str or ~azure.synapse.models.JobResult
+ :param scheduler_info:
+ :type scheduler_info: ~azure.synapse.models.SchedulerInformation
+ :param plugin_info:
+ :type plugin_info: ~azure.synapse.models.SparkServicePluginInformation
+ :param error_info:
+ :type error_info: list[~azure.synapse.models.ErrorInformation]
+ :param tags: A set of tags. Dictionary of :code:`<string>`.
+ :type tags: dict[str, str]
+ :param id:
+ :type id: int
+ :param app_id:
+ :type app_id: str
+ :param app_info: Dictionary of :code:`<string>`.
+ :type app_info: dict[str, str]
+ :param state:
+ :type state: str
+ :param log:
+ :type log: list[str]
+ """
+
+ _attribute_map = {
+ 'livy_info': {'key': 'livyInfo', 'type': 'LivySessionStateInformation'},
+ 'name': {'key': 'name', 'type': 'str'},
+ 'workspace_name': {'key': 'workspaceName', 'type': 'str'},
+ 'spark_pool_name': {'key': 'sparkPoolName', 'type': 'str'},
+ 'submitter_name': {'key': 'submitterName', 'type': 'str'},
+ 'submitter_id': {'key': 'submitterId', 'type': 'str'},
+ 'artifact_id': {'key': 'artifactId', 'type': 'str'},
+ 'job_type': {'key': 'jobType', 'type': 'str'},
+ 'result': {'key': 'result', 'type': 'str'},
+ 'scheduler_info': {'key': 'schedulerInfo', 'type': 'SchedulerInformation'},
+ 'plugin_info': {'key': 'pluginInfo', 'type': 'SparkServicePluginInformation'},
+ 'error_info': {'key': 'errorInfo', 'type': '[ErrorInformation]'},
+ 'tags': {'key': 'tags', 'type': '{str}'},
+ 'id': {'key': 'id', 'type': 'int'},
+ 'app_id': {'key': 'appId', 'type': 'str'},
+ 'app_info': {'key': 'appInfo', 'type': '{str}'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'log': {'key': 'log', 'type': '[str]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ExtendedLivySessionResponse, self).__init__(**kwargs)
+ self.livy_info = kwargs.get('livy_info', None)
+ self.name = kwargs.get('name', None)
+ self.workspace_name = kwargs.get('workspace_name', None)
+ self.spark_pool_name = kwargs.get('spark_pool_name', None)
+ self.submitter_name = kwargs.get('submitter_name', None)
+ self.submitter_id = kwargs.get('submitter_id', None)
+ self.artifact_id = kwargs.get('artifact_id', None)
+ self.job_type = kwargs.get('job_type', None)
+ self.result = kwargs.get('result', None)
+ self.scheduler_info = kwargs.get('scheduler_info', None)
+ self.plugin_info = kwargs.get('plugin_info', None)
+ self.error_info = kwargs.get('error_info', None)
+ self.tags = kwargs.get('tags', None)
+ self.id = kwargs.get('id', None)
+ self.app_id = kwargs.get('app_id', None)
+ self.app_info = kwargs.get('app_info', None)
+ self.state = kwargs.get('state', None)
+ self.log = kwargs.get('log', None)
+
+
+class GetAccessControlInfoRequest(msrest.serialization.Model):
+ """GetAccessControlInfoRequest.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar artifact_type: Default value: "Workspace".
+ :vartype artifact_type: str + :param artifact_name: + :type artifact_name: str + """ + + _validation = { + 'artifact_type': {'constant': True}, + } + + _attribute_map = { + 'artifact_type': {'key': 'artifactType', 'type': 'str'}, + 'artifact_name': {'key': 'artifactName', 'type': 'str'}, + } + + artifact_type = "Workspace" + + def __init__( + self, + **kwargs + ): + super(GetAccessControlInfoRequest, self).__init__(**kwargs) + self.artifact_name = kwargs.get('artifact_name', None) + + +class HistoryServerDataResponse(msrest.serialization.Model): + """HistoryServerDataResponse. + + :param data: + :type data: ~azure.synapse.models.Data + """ + + _attribute_map = { + 'data': {'key': 'data', 'type': 'Data'}, + } + + def __init__( + self, + **kwargs + ): + super(HistoryServerDataResponse, self).__init__(**kwargs) + self.data = kwargs.get('data', None) + + +class HistoryServerDiagnosticResponse(msrest.serialization.Model): + """HistoryServerDiagnosticResponse. + + :param data: + :type data: ~azure.synapse.models.HistoryServerDiagnosticResponseData + """ + + _attribute_map = { + 'data': {'key': 'data', 'type': 'HistoryServerDiagnosticResponseData'}, + } + + def __init__( + self, + **kwargs + ): + super(HistoryServerDiagnosticResponse, self).__init__(**kwargs) + self.data = kwargs.get('data', None) + + +class HistoryServerDiagnosticResponseData(msrest.serialization.Model): + """HistoryServerDiagnosticResponseData. + + :param stages: + :type stages: ~azure.synapse.models.Stages + :param executors: + :type executors: ~azure.synapse.models.Executors + """ + + _attribute_map = { + 'stages': {'key': 'stages', 'type': 'Stages'}, + 'executors': {'key': 'executors', 'type': 'Executors'}, + } + + def __init__( + self, + **kwargs + ): + super(HistoryServerDiagnosticResponseData, self).__init__(**kwargs) + self.stages = kwargs.get('stages', None) + self.executors = kwargs.get('executors', None) + + +class HistoryServerGraphResponse(msrest.serialization.Model): + """HistoryServerGraphResponse. + + :param data: + :type data: ~azure.synapse.models.HistoryServerGraphResponseData + """ + + _attribute_map = { + 'data': {'key': 'data', 'type': 'HistoryServerGraphResponseData'}, + } + + def __init__( + self, + **kwargs + ): + super(HistoryServerGraphResponse, self).__init__(**kwargs) + self.data = kwargs.get('data', None) + + +class HistoryServerGraphResponseData(msrest.serialization.Model): + """HistoryServerGraphResponseData. + + :param is_app_finished: + :type is_app_finished: bool + :param jobs: + :type jobs: ~azure.synapse.models.Jobs + :param stages: + :type stages: ~azure.synapse.models.Stages + """ + + _attribute_map = { + 'is_app_finished': {'key': 'isAppFinished', 'type': 'bool'}, + 'jobs': {'key': 'jobs', 'type': 'Jobs'}, + 'stages': {'key': 'stages', 'type': 'Stages'}, + } + + def __init__( + self, + **kwargs + ): + super(HistoryServerGraphResponseData, self).__init__(**kwargs) + self.is_app_finished = kwargs.get('is_app_finished', None) + self.jobs = kwargs.get('jobs', None) + self.stages = kwargs.get('stages', None) + + +class HistoryServerPropertiesResponse(msrest.serialization.Model): + """HistoryServerPropertiesResponse. 
+ + :param web_proxy_endpoint: + :type web_proxy_endpoint: str + """ + + _attribute_map = { + 'web_proxy_endpoint': {'key': 'webProxyEndpoint', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(HistoryServerPropertiesResponse, self).__init__(**kwargs) + self.web_proxy_endpoint = kwargs.get('web_proxy_endpoint', None) + + +class Jobs(msrest.serialization.Model): + """Jobs. + + :param time: + :type time: int + :param start_time: + :type start_time: int + :param id: + :type id: int + :param edges: + :type edges: list[~azure.synapse.models.Edge] + :param nodes: + :type nodes: list[int] + """ + + _attribute_map = { + 'time': {'key': 'time', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'int'}, + 'id': {'key': 'id', 'type': 'int'}, + 'edges': {'key': 'edges', 'type': '[Edge]'}, + 'nodes': {'key': 'nodes', 'type': '[int]'}, + } + + def __init__( + self, + **kwargs + ): + super(Jobs, self).__init__(**kwargs) + self.time = kwargs.get('time', None) + self.start_time = kwargs.get('start_time', None) + self.id = kwargs.get('id', None) + self.edges = kwargs.get('edges', None) + self.nodes = kwargs.get('nodes', None) + + +class LivyBatchStateInformation(msrest.serialization.Model): + """LivyBatchStateInformation. + + :param not_started_at: + :type not_started_at: ~datetime.datetime + :param starting_at: + :type starting_at: ~datetime.datetime + :param running_at: + :type running_at: ~datetime.datetime + :param dead_at: + :type dead_at: ~datetime.datetime + :param success_at: + :type success_at: ~datetime.datetime + :param killed_at: + :type killed_at: ~datetime.datetime + :param recovering_at: + :type recovering_at: ~datetime.datetime + :param current_state: + :type current_state: str + :param job_creation_request: + :type job_creation_request: ~azure.synapse.models.LivyRequestBase + """ + + _attribute_map = { + 'not_started_at': {'key': 'notStartedAt', 'type': 'iso-8601'}, + 'starting_at': {'key': 'startingAt', 'type': 'iso-8601'}, + 'running_at': {'key': 'runningAt', 'type': 'iso-8601'}, + 'dead_at': {'key': 'deadAt', 'type': 'iso-8601'}, + 'success_at': {'key': 'successAt', 'type': 'iso-8601'}, + 'killed_at': {'key': 'killedAt', 'type': 'iso-8601'}, + 'recovering_at': {'key': 'recoveringAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + 'job_creation_request': {'key': 'jobCreationRequest', 'type': 'LivyRequestBase'}, + } + + def __init__( + self, + **kwargs + ): + super(LivyBatchStateInformation, self).__init__(**kwargs) + self.not_started_at = kwargs.get('not_started_at', None) + self.starting_at = kwargs.get('starting_at', None) + self.running_at = kwargs.get('running_at', None) + self.dead_at = kwargs.get('dead_at', None) + self.success_at = kwargs.get('success_at', None) + self.killed_at = kwargs.get('killed_at', None) + self.recovering_at = kwargs.get('recovering_at', None) + self.current_state = kwargs.get('current_state', None) + self.job_creation_request = kwargs.get('job_creation_request', None) + + +class LivyRequestBase(msrest.serialization.Model): + """LivyRequestBase. + + :param name: + :type name: str + :param file: + :type file: str + :param class_name: + :type class_name: str + :param args: + :type args: list[str] + :param jars: + :type jars: list[str] + :param files: + :type files: list[str] + :param archives: + :type archives: list[str] + :param conf: Dictionary of :code:``. 
+ :type conf: dict[str, str] + :param driver_memory: + :type driver_memory: str + :param driver_cores: + :type driver_cores: int + :param executor_memory: + :type executor_memory: str + :param executor_cores: + :type executor_cores: int + :param num_executors: + :type num_executors: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'args': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'conf': {'key': 'conf', 'type': '{str}'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(LivyRequestBase, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.file = kwargs.get('file', None) + self.class_name = kwargs.get('class_name', None) + self.args = kwargs.get('args', None) + self.jars = kwargs.get('jars', None) + self.files = kwargs.get('files', None) + self.archives = kwargs.get('archives', None) + self.conf = kwargs.get('conf', None) + self.driver_memory = kwargs.get('driver_memory', None) + self.driver_cores = kwargs.get('driver_cores', None) + self.executor_memory = kwargs.get('executor_memory', None) + self.executor_cores = kwargs.get('executor_cores', None) + self.num_executors = kwargs.get('num_executors', None) + + +class LivySessionStateInformation(msrest.serialization.Model): + """LivySessionStateInformation. 
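+
+ Usage sketch (editorial note; the state and timestamp are invented): the
+ ``*_at`` fields are plain datetimes on the model, and ``serialize()``
+ emits them under the camelCase wire keys from ``_attribute_map`` in
+ ISO-8601 form, omitting fields that are still None.
+
+ >>> import datetime
+ >>> from azure.synapse.models import LivySessionStateInformation
+ >>> info = LivySessionStateInformation(
+ ...     current_state='idle',
+ ...     idle_at=datetime.datetime(2020, 2, 18, 13, 32))
+ >>> wire = info.serialize()  # None fields are dropped from the payload
+ >>> sorted(wire)
+ ['currentState', 'idleAt']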
+ + :param not_started_at: + :type not_started_at: ~datetime.datetime + :param starting_at: + :type starting_at: ~datetime.datetime + :param idle_at: + :type idle_at: ~datetime.datetime + :param dead_at: + :type dead_at: ~datetime.datetime + :param shutting_down_at: + :type shutting_down_at: ~datetime.datetime + :param killed_at: + :type killed_at: ~datetime.datetime + :param recovering_at: + :type recovering_at: ~datetime.datetime + :param busy_at: + :type busy_at: ~datetime.datetime + :param error_at: + :type error_at: ~datetime.datetime + :param current_state: + :type current_state: str + :param job_creation_request: + :type job_creation_request: ~azure.synapse.models.LivyRequestBase + """ + + _attribute_map = { + 'not_started_at': {'key': 'notStartedAt', 'type': 'iso-8601'}, + 'starting_at': {'key': 'startingAt', 'type': 'iso-8601'}, + 'idle_at': {'key': 'idleAt', 'type': 'iso-8601'}, + 'dead_at': {'key': 'deadAt', 'type': 'iso-8601'}, + 'shutting_down_at': {'key': 'shuttingDownAt', 'type': 'iso-8601'}, + 'killed_at': {'key': 'killedAt', 'type': 'iso-8601'}, + 'recovering_at': {'key': 'recoveringAt', 'type': 'iso-8601'}, + 'busy_at': {'key': 'busyAt', 'type': 'iso-8601'}, + 'error_at': {'key': 'errorAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + 'job_creation_request': {'key': 'jobCreationRequest', 'type': 'LivyRequestBase'}, + } + + def __init__( + self, + **kwargs + ): + super(LivySessionStateInformation, self).__init__(**kwargs) + self.not_started_at = kwargs.get('not_started_at', None) + self.starting_at = kwargs.get('starting_at', None) + self.idle_at = kwargs.get('idle_at', None) + self.dead_at = kwargs.get('dead_at', None) + self.shutting_down_at = kwargs.get('shutting_down_at', None) + self.killed_at = kwargs.get('killed_at', None) + self.recovering_at = kwargs.get('recovering_at', None) + self.busy_at = kwargs.get('busy_at', None) + self.error_at = kwargs.get('error_at', None) + self.current_state = kwargs.get('current_state', None) + self.job_creation_request = kwargs.get('job_creation_request', None) + + +class LivyStatementCancellationResponse(msrest.serialization.Model): + """LivyStatementCancellationResponse. + + :param msg: + :type msg: str + """ + + _attribute_map = { + 'msg': {'key': 'msg', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LivyStatementCancellationResponse, self).__init__(**kwargs) + self.msg = kwargs.get('msg', None) + + +class LivyStatementOutput(msrest.serialization.Model): + """LivyStatementOutput. 
+ + :param status: + :type status: str + :param execution_count: + :type execution_count: int + :param data: + :type data: ~azure.synapse.models.LivyStatementOutputData + :param ename: + :type ename: str + :param evalue: + :type evalue: str + :param traceback: + :type traceback: list[str] + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'execution_count': {'key': 'execution_count', 'type': 'int'}, + 'data': {'key': 'data', 'type': 'LivyStatementOutputData'}, + 'ename': {'key': 'ename', 'type': 'str'}, + 'evalue': {'key': 'evalue', 'type': 'str'}, + 'traceback': {'key': 'traceback', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(LivyStatementOutput, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.execution_count = kwargs.get('execution_count', None) + self.data = kwargs.get('data', None) + self.ename = kwargs.get('ename', None) + self.evalue = kwargs.get('evalue', None) + self.traceback = kwargs.get('traceback', None) + + +class LivyStatementOutputData(msrest.serialization.Model): + """LivyStatementOutputData. + + """ + + _attribute_map = { + } + + def __init__( + self, + **kwargs + ): + super(LivyStatementOutputData, self).__init__(**kwargs) + + +class LivyStatementRequestBody(msrest.serialization.Model): + """LivyStatementRequestBody. + + :param code: + :type code: str + :param kind: + :type kind: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(LivyStatementRequestBody, self).__init__(**kwargs) + self.code = kwargs.get('code', None) + self.kind = kwargs.get('kind', None) + + +class LivyStatementResponseBody(msrest.serialization.Model): + """LivyStatementResponseBody. + + :param id: + :type id: int + :param code: + :type code: str + :param state: + :type state: str + :param output: + :type output: ~azure.synapse.models.LivyStatementOutput + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'int'}, + 'code': {'key': 'code', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'output': {'key': 'output', 'type': 'LivyStatementOutput'}, + } + + def __init__( + self, + **kwargs + ): + super(LivyStatementResponseBody, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.code = kwargs.get('code', None) + self.state = kwargs.get('state', None) + self.output = kwargs.get('output', None) + + +class LivyStatementsResponseBody(msrest.serialization.Model): + """LivyStatementsResponseBody. + + :param total_statements: + :type total_statements: int + :param statements: + :type statements: list[~azure.synapse.models.LivyStatementResponseBody] + """ + + _attribute_map = { + 'total_statements': {'key': 'total_statements', 'type': 'int'}, + 'statements': {'key': 'statements', 'type': '[LivyStatementResponseBody]'}, + } + + def __init__( + self, + **kwargs + ): + super(LivyStatementsResponseBody, self).__init__(**kwargs) + self.total_statements = kwargs.get('total_statements', None) + self.statements = kwargs.get('statements', None) + + +class Option(msrest.serialization.Model): + """Option. 
+ + :param additional_prop1: + :type additional_prop1: str + :param additional_prop2: + :type additional_prop2: str + :param additional_prop3: + :type additional_prop3: str + """ + + _attribute_map = { + 'additional_prop1': {'key': 'additionalProp1', 'type': 'str'}, + 'additional_prop2': {'key': 'additionalProp2', 'type': 'str'}, + 'additional_prop3': {'key': 'additionalProp3', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Option, self).__init__(**kwargs) + self.additional_prop1 = kwargs.get('additional_prop1', None) + self.additional_prop2 = kwargs.get('additional_prop2', None) + self.additional_prop3 = kwargs.get('additional_prop3', None) + + +class SchedulerInformation(msrest.serialization.Model): + """SchedulerInformation. + + :param submitted_at: + :type submitted_at: ~datetime.datetime + :param scheduled_at: + :type scheduled_at: ~datetime.datetime + :param ended_at: + :type ended_at: ~datetime.datetime + :param cancellation_requested_at: + :type cancellation_requested_at: ~datetime.datetime + :param current_state: Possible values include: 'Queued', 'Scheduled', 'Ended'. + :type current_state: str or ~azure.synapse.models.SchedulerCurrentState + """ + + _attribute_map = { + 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, + 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, + 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, + 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SchedulerInformation, self).__init__(**kwargs) + self.submitted_at = kwargs.get('submitted_at', None) + self.scheduled_at = kwargs.get('scheduled_at', None) + self.ended_at = kwargs.get('ended_at', None) + self.cancellation_requested_at = kwargs.get('cancellation_requested_at', None) + self.current_state = kwargs.get('current_state', None) + + +class SetWorkspaceAdministratorsRequest(msrest.serialization.Model): + """SetWorkspaceAdministratorsRequest. + + :param administrators: + :type administrators: list[str] + """ + + _attribute_map = { + 'administrators': {'key': 'administrators', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(SetWorkspaceAdministratorsRequest, self).__init__(**kwargs) + self.administrators = kwargs.get('administrators', None) + + +class SparkJob(msrest.serialization.Model): + """SparkJob. 
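+
+ Usage sketch (editorial note; the job names and states are invented):
+ ``pipeline`` is a list of nested SparkJob models, so one job can carry
+ its child jobs.
+
+ >>> from azure.synapse.models import SparkJob
+ >>> child = SparkJob(name='load-stage', state='Succeeded')
+ >>> parent = SparkJob(name='nightly-etl', state='Running', pipeline=[child])
+ >>> parent.pipeline[0].name
+ 'load-stage'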
+ + :param state: + :type state: str + :param name: + :type name: str + :param submitter: + :type submitter: str + :param compute: + :type compute: str + :param spark_application_id: + :type spark_application_id: str + :param livy_id: + :type livy_id: str + :param timing: + :type timing: list[str] + :param spark_job_definition: + :type spark_job_definition: str + :param pipeline: + :type pipeline: list[~azure.synapse.models.SparkJob] + :param job_type: + :type job_type: str + :param submit_time: + :type submit_time: ~datetime.datetime + :param end_time: + :type end_time: ~datetime.datetime + :param queued_duration: + :type queued_duration: str + :param running_duration: + :type running_duration: str + :param total_duration: + :type total_duration: str + """ + + _attribute_map = { + 'state': {'key': 'state', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'submitter': {'key': 'submitter', 'type': 'str'}, + 'compute': {'key': 'compute', 'type': 'str'}, + 'spark_application_id': {'key': 'sparkApplicationId', 'type': 'str'}, + 'livy_id': {'key': 'livyId', 'type': 'str'}, + 'timing': {'key': 'timing', 'type': '[str]'}, + 'spark_job_definition': {'key': 'sparkJobDefinition', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': '[SparkJob]'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'submit_time': {'key': 'submitTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'queued_duration': {'key': 'queuedDuration', 'type': 'str'}, + 'running_duration': {'key': 'runningDuration', 'type': 'str'}, + 'total_duration': {'key': 'totalDuration', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkJob, self).__init__(**kwargs) + self.state = kwargs.get('state', None) + self.name = kwargs.get('name', None) + self.submitter = kwargs.get('submitter', None) + self.compute = kwargs.get('compute', None) + self.spark_application_id = kwargs.get('spark_application_id', None) + self.livy_id = kwargs.get('livy_id', None) + self.timing = kwargs.get('timing', None) + self.spark_job_definition = kwargs.get('spark_job_definition', None) + self.pipeline = kwargs.get('pipeline', None) + self.job_type = kwargs.get('job_type', None) + self.submit_time = kwargs.get('submit_time', None) + self.end_time = kwargs.get('end_time', None) + self.queued_duration = kwargs.get('queued_duration', None) + self.running_duration = kwargs.get('running_duration', None) + self.total_duration = kwargs.get('total_duration', None) + + +class SparkJobListViewResponse(msrest.serialization.Model): + """SparkJobListViewResponse. + + :param n_jobs: + :type n_jobs: int + :param spark_jobs: + :type spark_jobs: list[~azure.synapse.models.SparkJob] + """ + + _attribute_map = { + 'n_jobs': {'key': 'nJobs', 'type': 'int'}, + 'spark_jobs': {'key': 'sparkJobs', 'type': '[SparkJob]'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkJobListViewResponse, self).__init__(**kwargs) + self.n_jobs = kwargs.get('n_jobs', None) + self.spark_jobs = kwargs.get('spark_jobs', None) + + +class SparkServicePluginInformation(msrest.serialization.Model): + """SparkServicePluginInformation. 
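+
+ Usage sketch (editorial note): ``current_state`` accepts either a plain
+ string or a ~azure.synapse.models.PluginCurrentState enum value; the
+ string below is one of the documented values.
+
+ >>> from azure.synapse.models import SparkServicePluginInformation
+ >>> info = SparkServicePluginInformation(current_state='Monitoring')
+ >>> info.current_state
+ 'Monitoring'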
+ + :param preparation_started_at: + :type preparation_started_at: ~datetime.datetime + :param resource_acquisition_started_at: + :type resource_acquisition_started_at: ~datetime.datetime + :param submission_started_at: + :type submission_started_at: ~datetime.datetime + :param monitoring_started_at: + :type monitoring_started_at: ~datetime.datetime + :param cleanup_started_at: + :type cleanup_started_at: ~datetime.datetime + :param current_state: Possible values include: 'Preparation', 'ResourceAcquisition', 'Queued', + 'Submission', 'Monitoring', 'Cleanup', 'Ended'. + :type current_state: str or ~azure.synapse.models.PluginCurrentState + """ + + _attribute_map = { + 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, + 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, + 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, + 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, + 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SparkServicePluginInformation, self).__init__(**kwargs) + self.preparation_started_at = kwargs.get('preparation_started_at', None) + self.resource_acquisition_started_at = kwargs.get('resource_acquisition_started_at', None) + self.submission_started_at = kwargs.get('submission_started_at', None) + self.monitoring_started_at = kwargs.get('monitoring_started_at', None) + self.cleanup_started_at = kwargs.get('cleanup_started_at', None) + self.current_state = kwargs.get('current_state', None) + + +class Stages(msrest.serialization.Model): + """Stages. + + :param data_avg: + :type data_avg: int + :param time_avg: + :type time_avg: int + :param id: + :type id: int + :param attempt_id: + :type attempt_id: int + :param name: + :type name: str + :param data_skew_task: + :type data_skew_task: list[int] + :param time_skew_task: + :type time_skew_task: list[int] + :param tasks: + :type tasks: list[int] + """ + + _attribute_map = { + 'data_avg': {'key': 'dataAvg', 'type': 'int'}, + 'time_avg': {'key': 'timeAvg', 'type': 'int'}, + 'id': {'key': 'id', 'type': 'int'}, + 'attempt_id': {'key': 'attemptId', 'type': 'int'}, + 'name': {'key': 'name', 'type': 'str'}, + 'data_skew_task': {'key': 'dataSkewTask', 'type': '[int]'}, + 'time_skew_task': {'key': 'timeSkewTask', 'type': '[int]'}, + 'tasks': {'key': 'tasks', 'type': '[int]'}, + } + + def __init__( + self, + **kwargs + ): + super(Stages, self).__init__(**kwargs) + self.data_avg = kwargs.get('data_avg', None) + self.time_avg = kwargs.get('time_avg', None) + self.id = kwargs.get('id', None) + self.attempt_id = kwargs.get('attempt_id', None) + self.name = kwargs.get('name', None) + self.data_skew_task = kwargs.get('data_skew_task', None) + self.time_skew_task = kwargs.get('time_skew_task', None) + self.tasks = kwargs.get('tasks', None) + + +class Tables(msrest.serialization.Model): + """Tables. 
+
+ :param operation:
+ :type operation: str
+ :param options:
+ :type options: list[~azure.synapse.models.Option]
+ """
+
+ _attribute_map = {
+ 'operation': {'key': 'operation', 'type': 'str'},
+ 'options': {'key': 'options', 'type': '[Option]'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(Tables, self).__init__(**kwargs)
+ self.operation = kwargs.get('operation', None)
+ self.options = kwargs.get('options', None)
+
+
+class WorkspaceAccessControlResponse(msrest.serialization.Model):
+ """WorkspaceAccessControlResponse.
+
+ :param administrators:
+ :type administrators: list[str]
+ :param etag:
+ :type etag: str
+ :param access_control_list: Dictionary of list[str].
+ :type access_control_list: dict[str, list[str]]
+ """
+
+ _attribute_map = {
+ 'administrators': {'key': 'administrators', 'type': '[str]'},
+ 'etag': {'key': 'etag', 'type': 'str'},
+ 'access_control_list': {'key': 'accessControlList', 'type': '{[str]}'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(WorkspaceAccessControlResponse, self).__init__(**kwargs)
+ self.administrators = kwargs.get('administrators', None)
+ self.etag = kwargs.get('etag', None)
+ self.access_control_list = kwargs.get('access_control_list', None)
diff --git a/sdk/synapse/azure-synapse/azure/synapse/models/_models_py3.py b/sdk/synapse/azure-synapse/azure/synapse/models/_models_py3.py
new file mode 100644
index 000000000000..9d9c28a5a74e
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/models/_models_py3.py
@@ -0,0 +1,1714 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+import datetime
+from typing import Dict, List, Optional, Union
+
+from azure.core.exceptions import HttpResponseError
+import msrest.serialization
+
+
+class Config(msrest.serialization.Model):
+ """Config.
+
+ :param is_hive_enabled:
+ :type is_hive_enabled: bool
+ """
+
+ _attribute_map = {
+ 'is_hive_enabled': {'key': 'isHiveEnabled', 'type': 'bool'},
+ }
+
+ def __init__(
+ self,
+ *,
+ is_hive_enabled: Optional[bool] = None,
+ **kwargs
+ ):
+ super(Config, self).__init__(**kwargs)
+ self.is_hive_enabled = is_hive_enabled
+
+
+class Data(msrest.serialization.Model):
+ """Data.
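+
+ Usage sketch (editorial note; the paths and values are invented): unlike
+ the models in _models.py, the models in this Python 3 file declare
+ keyword-only, typed parameters (note the bare ``*`` in ``__init__``)
+ instead of ``**kwargs``.
+
+ >>> from azure.synapse.models import Config, Data
+ >>> data = Data(
+ ...     handler_path='/history/app-42',
+ ...     inputs=['events.parquet'],
+ ...     config=Config(is_hive_enabled=True))
+ >>> data.config.is_hive_enabled
+ True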
+ + :param handler_path: + :type handler_path: str + :param inputs: + :type inputs: list[str] + :param outputs: + :type outputs: list[str] + :param tables: + :type tables: ~azure.synapse.models.Tables + :param config: + :type config: ~azure.synapse.models.Config + """ + + _attribute_map = { + 'handler_path': {'key': 'handlerPath', 'type': 'str'}, + 'inputs': {'key': 'inputs', 'type': '[str]'}, + 'outputs': {'key': 'outputs', 'type': '[str]'}, + 'tables': {'key': 'tables', 'type': 'Tables'}, + 'config': {'key': 'config', 'type': 'Config'}, + } + + def __init__( + self, + *, + handler_path: Optional[str] = None, + inputs: Optional[List[str]] = None, + outputs: Optional[List[str]] = None, + tables: Optional["Tables"] = None, + config: Optional["Config"] = None, + **kwargs + ): + super(Data, self).__init__(**kwargs) + self.handler_path = handler_path + self.inputs = inputs + self.outputs = outputs + self.tables = tables + self.config = config + + +class Edge(msrest.serialization.Model): + """Edge. + + :param id: + :type id: int + :param child_id: + :type child_id: int + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'int'}, + 'child_id': {'key': 'childId', 'type': 'int'}, + } + + def __init__( + self, + *, + id: Optional[int] = None, + child_id: Optional[int] = None, + **kwargs + ): + super(Edge, self).__init__(**kwargs) + self.id = id + self.child_id = child_id + + +class ErrorDetail(msrest.serialization.Model): + """ErrorDetail. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. + :type code: str + :param message: Required. + :type message: str + :param target: + :type target: str + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + } + + def __init__( + self, + *, + code: str, + message: str, + target: Optional[str] = None, + **kwargs + ): + super(ErrorDetail, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + + +class ErrorInformation(msrest.serialization.Model): + """ErrorInformation. + + :param message: + :type message: str + :param error_code: + :type error_code: str + :param source: Possible values include: 'System', 'User', 'Unknown', 'Dependency'. + :type source: str or ~azure.synapse.models.ErrorSource + """ + + _attribute_map = { + 'message': {'key': 'message', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'source': {'key': 'source', 'type': 'str'}, + } + + def __init__( + self, + *, + message: Optional[str] = None, + error_code: Optional[str] = None, + source: Optional[Union[str, "ErrorSource"]] = None, + **kwargs + ): + super(ErrorInformation, self).__init__(**kwargs) + self.message = message + self.error_code = error_code + self.source = source + + +class ErrorResponseException(HttpResponseError): + """Server responded with exception of type: 'ErrorResponse'. + + :param response: Server response to be deserialized. + :param error_model: A deserialized model of the response body as model. + """ + + def __init__(self, response, error_model): + self.error = error_model + super(ErrorResponseException, self).__init__(response=response, error_model=error_model) + + @classmethod + def from_response(cls, response, deserialize): + """Deserialize this response as this exception, or a subclass of this exception. + + :param response: Server response to be deserialized. 
+ :param deserialize: A deserializer + """ + model_name = 'ErrorResponse' + error = deserialize(model_name, response) + if error is None: + error = deserialize.dependencies[model_name]() + return error._EXCEPTION_TYPE(response, error) + + +class ErrorResponse(msrest.serialization.Model): + """ErrorResponse. + + All required parameters must be populated in order to send to Azure. + + :param code: Required. + :type code: str + :param message: Required. + :type message: str + :param target: + :type target: str + :param details: + :type details: list[~azure.synapse.models.ErrorDetail] + """ + _EXCEPTION_TYPE = ErrorResponseException + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'message': {'key': 'message', 'type': 'str'}, + 'target': {'key': 'target', 'type': 'str'}, + 'details': {'key': 'details', 'type': '[ErrorDetail]'}, + } + + def __init__( + self, + *, + code: str, + message: str, + target: Optional[str] = None, + details: Optional[List["ErrorDetail"]] = None, + **kwargs + ): + super(ErrorResponse, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class Executors(msrest.serialization.Model): + """Executors. + + :param is_dynamic_allocation_on: + :type is_dynamic_allocation_on: bool + :param executor_instances: + :type executor_instances: int + :param start_time: + :type start_time: int + :param end_time: + :type end_time: int + :param events: + :type events: list[int] + :param executor_cores: + :type executor_cores: str + :param executor_memory: + :type executor_memory: str + """ + + _attribute_map = { + 'is_dynamic_allocation_on': {'key': 'isDynamicAllocationOn', 'type': 'bool'}, + 'executor_instances': {'key': 'executorInstances', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'int'}, + 'end_time': {'key': 'endTime', 'type': 'int'}, + 'events': {'key': 'events', 'type': '[int]'}, + 'executor_cores': {'key': 'executorCores', 'type': 'str'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + } + + def __init__( + self, + *, + is_dynamic_allocation_on: Optional[bool] = None, + executor_instances: Optional[int] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + events: Optional[List[int]] = None, + executor_cores: Optional[str] = None, + executor_memory: Optional[str] = None, + **kwargs + ): + super(Executors, self).__init__(**kwargs) + self.is_dynamic_allocation_on = is_dynamic_allocation_on + self.executor_instances = executor_instances + self.start_time = start_time + self.end_time = end_time + self.events = events + self.executor_cores = executor_cores + self.executor_memory = executor_memory + + +class ExtendedLivyBatchRequest(msrest.serialization.Model): + """ExtendedLivyBatchRequest. + + :param tags: A set of tags. Dictionary of :code:``. + :type tags: dict[str, str] + :param artifact_id: + :type artifact_id: str + :param name: + :type name: str + :param file: + :type file: str + :param class_name: + :type class_name: str + :param args: + :type args: list[str] + :param jars: + :type jars: list[str] + :param files: + :type files: list[str] + :param archives: + :type archives: list[str] + :param conf: Dictionary of :code:``. 
+ :type conf: dict[str, str] + :param driver_memory: + :type driver_memory: str + :param driver_cores: + :type driver_cores: int + :param executor_memory: + :type executor_memory: str + :param executor_cores: + :type executor_cores: int + :param num_executors: + :type num_executors: int + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'args': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'conf': {'key': 'conf', 'type': '{str}'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + artifact_id: Optional[str] = None, + name: Optional[str] = None, + file: Optional[str] = None, + class_name: Optional[str] = None, + args: Optional[List[str]] = None, + jars: Optional[List[str]] = None, + files: Optional[List[str]] = None, + archives: Optional[List[str]] = None, + conf: Optional[Dict[str, str]] = None, + driver_memory: Optional[str] = None, + driver_cores: Optional[int] = None, + executor_memory: Optional[str] = None, + executor_cores: Optional[int] = None, + num_executors: Optional[int] = None, + **kwargs + ): + super(ExtendedLivyBatchRequest, self).__init__(**kwargs) + self.tags = tags + self.artifact_id = artifact_id + self.name = name + self.file = file + self.class_name = class_name + self.args = args + self.jars = jars + self.files = files + self.archives = archives + self.conf = conf + self.driver_memory = driver_memory + self.driver_cores = driver_cores + self.executor_memory = executor_memory + self.executor_cores = executor_cores + self.num_executors = num_executors + + +class ExtendedLivyBatchResponse(msrest.serialization.Model): + """ExtendedLivyBatchResponse. + + :param livy_info: + :type livy_info: ~azure.synapse.models.LivyBatchStateInformation + :param name: + :type name: str + :param workspace_name: + :type workspace_name: str + :param spark_pool_name: + :type spark_pool_name: str + :param submitter_name: + :type submitter_name: str + :param submitter_id: + :type submitter_id: str + :param artifact_id: + :type artifact_id: str + :param job_type: Possible values include: 'SparkBatch', 'SparkSession'. + :type job_type: str or ~azure.synapse.models.JobType + :param result: Possible values include: 'Uncertain', 'Succeeded', 'Failed', 'Cancelled'. + :type result: str or ~azure.synapse.models.JobResult + :param scheduler_info: + :type scheduler_info: ~azure.synapse.models.SchedulerInformation + :param plugin_info: + :type plugin_info: ~azure.synapse.models.SparkServicePluginInformation + :param error_info: + :type error_info: list[~azure.synapse.models.ErrorInformation] + :param tags: A set of tags. Dictionary of :code:``. + :type tags: dict[str, str] + :param id: + :type id: int + :param app_id: + :type app_id: str + :param app_info: Dictionary of :code:``. 
+ :type app_info: dict[str, str] + :param state: + :type state: str + :param log: + :type log: list[str] + """ + + _attribute_map = { + 'livy_info': {'key': 'livyInfo', 'type': 'LivyBatchStateInformation'}, + 'name': {'key': 'name', 'type': 'str'}, + 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, + 'spark_pool_name': {'key': 'sparkPoolName', 'type': 'str'}, + 'submitter_name': {'key': 'submitterName', 'type': 'str'}, + 'submitter_id': {'key': 'submitterId', 'type': 'str'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'result': {'key': 'result', 'type': 'str'}, + 'scheduler_info': {'key': 'schedulerInfo', 'type': 'SchedulerInformation'}, + 'plugin_info': {'key': 'pluginInfo', 'type': 'SparkServicePluginInformation'}, + 'error_info': {'key': 'errorInfo', 'type': '[ErrorInformation]'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'id': {'key': 'id', 'type': 'int'}, + 'app_id': {'key': 'appId', 'type': 'str'}, + 'app_info': {'key': 'appInfo', 'type': '{str}'}, + 'state': {'key': 'state', 'type': 'str'}, + 'log': {'key': 'log', 'type': '[str]'}, + } + + def __init__( + self, + *, + livy_info: Optional["LivyBatchStateInformation"] = None, + name: Optional[str] = None, + workspace_name: Optional[str] = None, + spark_pool_name: Optional[str] = None, + submitter_name: Optional[str] = None, + submitter_id: Optional[str] = None, + artifact_id: Optional[str] = None, + job_type: Optional[Union[str, "JobType"]] = None, + result: Optional[Union[str, "JobResult"]] = None, + scheduler_info: Optional["SchedulerInformation"] = None, + plugin_info: Optional["SparkServicePluginInformation"] = None, + error_info: Optional[List["ErrorInformation"]] = None, + tags: Optional[Dict[str, str]] = None, + id: Optional[int] = None, + app_id: Optional[str] = None, + app_info: Optional[Dict[str, str]] = None, + state: Optional[str] = None, + log: Optional[List[str]] = None, + **kwargs + ): + super(ExtendedLivyBatchResponse, self).__init__(**kwargs) + self.livy_info = livy_info + self.name = name + self.workspace_name = workspace_name + self.spark_pool_name = spark_pool_name + self.submitter_name = submitter_name + self.submitter_id = submitter_id + self.artifact_id = artifact_id + self.job_type = job_type + self.result = result + self.scheduler_info = scheduler_info + self.plugin_info = plugin_info + self.error_info = error_info + self.tags = tags + self.id = id + self.app_id = app_id + self.app_info = app_info + self.state = state + self.log = log + + +class ExtendedLivyListBatchResponse(msrest.serialization.Model): + """ExtendedLivyListBatchResponse. + + :param from_property: + :type from_property: int + :param total: + :type total: int + :param sessions: + :type sessions: list[~azure.synapse.models.ExtendedLivyBatchResponse] + """ + + _attribute_map = { + 'from_property': {'key': 'from', 'type': 'int'}, + 'total': {'key': 'total', 'type': 'int'}, + 'sessions': {'key': 'sessions', 'type': '[ExtendedLivyBatchResponse]'}, + } + + def __init__( + self, + *, + from_property: Optional[int] = None, + total: Optional[int] = None, + sessions: Optional[List["ExtendedLivyBatchResponse"]] = None, + **kwargs + ): + super(ExtendedLivyListBatchResponse, self).__init__(**kwargs) + self.from_property = from_property + self.total = total + self.sessions = sessions + + +class ExtendedLivyListSessionResponse(msrest.serialization.Model): + """ExtendedLivyListSessionResponse. 
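+
+ Usage sketch (editorial note): ``from`` is a Python keyword, so the wire
+ key 'from' is surfaced as ``from_property`` and mapped back on
+ serialization via ``_attribute_map``.
+
+ >>> from azure.synapse.models import ExtendedLivyListSessionResponse
+ >>> page = ExtendedLivyListSessionResponse(from_property=0, total=0, sessions=[])
+ >>> sorted(page.serialize())
+ ['from', 'sessions', 'total']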
+ + :param from_property: + :type from_property: int + :param total: + :type total: int + :param sessions: + :type sessions: list[~azure.synapse.models.ExtendedLivySessionResponse] + """ + + _attribute_map = { + 'from_property': {'key': 'from', 'type': 'int'}, + 'total': {'key': 'total', 'type': 'int'}, + 'sessions': {'key': 'sessions', 'type': '[ExtendedLivySessionResponse]'}, + } + + def __init__( + self, + *, + from_property: Optional[int] = None, + total: Optional[int] = None, + sessions: Optional[List["ExtendedLivySessionResponse"]] = None, + **kwargs + ): + super(ExtendedLivyListSessionResponse, self).__init__(**kwargs) + self.from_property = from_property + self.total = total + self.sessions = sessions + + +class ExtendedLivySessionRequest(msrest.serialization.Model): + """ExtendedLivySessionRequest. + + :param tags: A set of tags. Dictionary of :code:``. + :type tags: dict[str, str] + :param artifact_id: + :type artifact_id: str + :param name: + :type name: str + :param file: + :type file: str + :param class_name: + :type class_name: str + :param args: + :type args: list[str] + :param jars: + :type jars: list[str] + :param files: + :type files: list[str] + :param archives: + :type archives: list[str] + :param conf: Dictionary of :code:``. + :type conf: dict[str, str] + :param driver_memory: + :type driver_memory: str + :param driver_cores: + :type driver_cores: int + :param executor_memory: + :type executor_memory: str + :param executor_cores: + :type executor_cores: int + :param num_executors: + :type num_executors: int + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'args': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'conf': {'key': 'conf', 'type': '{str}'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + artifact_id: Optional[str] = None, + name: Optional[str] = None, + file: Optional[str] = None, + class_name: Optional[str] = None, + args: Optional[List[str]] = None, + jars: Optional[List[str]] = None, + files: Optional[List[str]] = None, + archives: Optional[List[str]] = None, + conf: Optional[Dict[str, str]] = None, + driver_memory: Optional[str] = None, + driver_cores: Optional[int] = None, + executor_memory: Optional[str] = None, + executor_cores: Optional[int] = None, + num_executors: Optional[int] = None, + **kwargs + ): + super(ExtendedLivySessionRequest, self).__init__(**kwargs) + self.tags = tags + self.artifact_id = artifact_id + self.name = name + self.file = file + self.class_name = class_name + self.args = args + self.jars = jars + self.files = files + self.archives = archives + self.conf = conf + self.driver_memory = driver_memory + self.driver_cores = driver_cores + self.executor_memory = executor_memory + self.executor_cores = executor_cores + self.num_executors = num_executors + + +class ExtendedLivySessionResponse(msrest.serialization.Model): + """ExtendedLivySessionResponse. 
+ + :param livy_info: + :type livy_info: ~azure.synapse.models.LivySessionStateInformation + :param name: + :type name: str + :param workspace_name: + :type workspace_name: str + :param spark_pool_name: + :type spark_pool_name: str + :param submitter_name: + :type submitter_name: str + :param submitter_id: + :type submitter_id: str + :param artifact_id: + :type artifact_id: str + :param job_type: Possible values include: 'SparkBatch', 'SparkSession'. + :type job_type: str or ~azure.synapse.models.JobType + :param result: Possible values include: 'Uncertain', 'Succeeded', 'Failed', 'Cancelled'. + :type result: str or ~azure.synapse.models.JobResult + :param scheduler_info: + :type scheduler_info: ~azure.synapse.models.SchedulerInformation + :param plugin_info: + :type plugin_info: ~azure.synapse.models.SparkServicePluginInformation + :param error_info: + :type error_info: list[~azure.synapse.models.ErrorInformation] + :param tags: A set of tags. Dictionary of :code:``. + :type tags: dict[str, str] + :param id: + :type id: int + :param app_id: + :type app_id: str + :param app_info: Dictionary of :code:``. + :type app_info: dict[str, str] + :param state: + :type state: str + :param log: + :type log: list[str] + """ + + _attribute_map = { + 'livy_info': {'key': 'livyInfo', 'type': 'LivySessionStateInformation'}, + 'name': {'key': 'name', 'type': 'str'}, + 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, + 'spark_pool_name': {'key': 'sparkPoolName', 'type': 'str'}, + 'submitter_name': {'key': 'submitterName', 'type': 'str'}, + 'submitter_id': {'key': 'submitterId', 'type': 'str'}, + 'artifact_id': {'key': 'artifactId', 'type': 'str'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'result': {'key': 'result', 'type': 'str'}, + 'scheduler_info': {'key': 'schedulerInfo', 'type': 'SchedulerInformation'}, + 'plugin_info': {'key': 'pluginInfo', 'type': 'SparkServicePluginInformation'}, + 'error_info': {'key': 'errorInfo', 'type': '[ErrorInformation]'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'id': {'key': 'id', 'type': 'int'}, + 'app_id': {'key': 'appId', 'type': 'str'}, + 'app_info': {'key': 'appInfo', 'type': '{str}'}, + 'state': {'key': 'state', 'type': 'str'}, + 'log': {'key': 'log', 'type': '[str]'}, + } + + def __init__( + self, + *, + livy_info: Optional["LivySessionStateInformation"] = None, + name: Optional[str] = None, + workspace_name: Optional[str] = None, + spark_pool_name: Optional[str] = None, + submitter_name: Optional[str] = None, + submitter_id: Optional[str] = None, + artifact_id: Optional[str] = None, + job_type: Optional[Union[str, "JobType"]] = None, + result: Optional[Union[str, "JobResult"]] = None, + scheduler_info: Optional["SchedulerInformation"] = None, + plugin_info: Optional["SparkServicePluginInformation"] = None, + error_info: Optional[List["ErrorInformation"]] = None, + tags: Optional[Dict[str, str]] = None, + id: Optional[int] = None, + app_id: Optional[str] = None, + app_info: Optional[Dict[str, str]] = None, + state: Optional[str] = None, + log: Optional[List[str]] = None, + **kwargs + ): + super(ExtendedLivySessionResponse, self).__init__(**kwargs) + self.livy_info = livy_info + self.name = name + self.workspace_name = workspace_name + self.spark_pool_name = spark_pool_name + self.submitter_name = submitter_name + self.submitter_id = submitter_id + self.artifact_id = artifact_id + self.job_type = job_type + self.result = result + self.scheduler_info = scheduler_info + self.plugin_info = plugin_info + self.error_info = error_info + self.tags = 
tags + self.id = id + self.app_id = app_id + self.app_info = app_info + self.state = state + self.log = log + + +class GetAccessControlInfoRequest(msrest.serialization.Model): + """GetAccessControlInfoRequest. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar artifact_type: Default value: "Workspace". + :vartype artifact_type: str + :param artifact_name: + :type artifact_name: str + """ + + _validation = { + 'artifact_type': {'constant': True}, + } + + _attribute_map = { + 'artifact_type': {'key': 'artifactType', 'type': 'str'}, + 'artifact_name': {'key': 'artifactName', 'type': 'str'}, + } + + artifact_type = "Workspace" + + def __init__( + self, + *, + artifact_name: Optional[str] = None, + **kwargs + ): + super(GetAccessControlInfoRequest, self).__init__(**kwargs) + self.artifact_name = artifact_name + + +class HistoryServerDataResponse(msrest.serialization.Model): + """HistoryServerDataResponse. + + :param data: + :type data: ~azure.synapse.models.Data + """ + + _attribute_map = { + 'data': {'key': 'data', 'type': 'Data'}, + } + + def __init__( + self, + *, + data: Optional["Data"] = None, + **kwargs + ): + super(HistoryServerDataResponse, self).__init__(**kwargs) + self.data = data + + +class HistoryServerDiagnosticResponse(msrest.serialization.Model): + """HistoryServerDiagnosticResponse. + + :param data: + :type data: ~azure.synapse.models.HistoryServerDiagnosticResponseData + """ + + _attribute_map = { + 'data': {'key': 'data', 'type': 'HistoryServerDiagnosticResponseData'}, + } + + def __init__( + self, + *, + data: Optional["HistoryServerDiagnosticResponseData"] = None, + **kwargs + ): + super(HistoryServerDiagnosticResponse, self).__init__(**kwargs) + self.data = data + + +class HistoryServerDiagnosticResponseData(msrest.serialization.Model): + """HistoryServerDiagnosticResponseData. + + :param stages: + :type stages: ~azure.synapse.models.Stages + :param executors: + :type executors: ~azure.synapse.models.Executors + """ + + _attribute_map = { + 'stages': {'key': 'stages', 'type': 'Stages'}, + 'executors': {'key': 'executors', 'type': 'Executors'}, + } + + def __init__( + self, + *, + stages: Optional["Stages"] = None, + executors: Optional["Executors"] = None, + **kwargs + ): + super(HistoryServerDiagnosticResponseData, self).__init__(**kwargs) + self.stages = stages + self.executors = executors + + +class HistoryServerGraphResponse(msrest.serialization.Model): + """HistoryServerGraphResponse. + + :param data: + :type data: ~azure.synapse.models.HistoryServerGraphResponseData + """ + + _attribute_map = { + 'data': {'key': 'data', 'type': 'HistoryServerGraphResponseData'}, + } + + def __init__( + self, + *, + data: Optional["HistoryServerGraphResponseData"] = None, + **kwargs + ): + super(HistoryServerGraphResponse, self).__init__(**kwargs) + self.data = data + + +class HistoryServerGraphResponseData(msrest.serialization.Model): + """HistoryServerGraphResponseData. 
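+
+ Usage sketch (editorial note; the payload is invented): ``deserialize``,
+ inherited from msrest's Model, maps camelCase wire keys back to the
+ snake_case attributes declared in ``_attribute_map``.
+
+ >>> from azure.synapse.models import HistoryServerGraphResponseData
+ >>> data = HistoryServerGraphResponseData.deserialize({'isAppFinished': True})
+ >>> data.is_app_finished
+ True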
+ + :param is_app_finished: + :type is_app_finished: bool + :param jobs: + :type jobs: ~azure.synapse.models.Jobs + :param stages: + :type stages: ~azure.synapse.models.Stages + """ + + _attribute_map = { + 'is_app_finished': {'key': 'isAppFinished', 'type': 'bool'}, + 'jobs': {'key': 'jobs', 'type': 'Jobs'}, + 'stages': {'key': 'stages', 'type': 'Stages'}, + } + + def __init__( + self, + *, + is_app_finished: Optional[bool] = None, + jobs: Optional["Jobs"] = None, + stages: Optional["Stages"] = None, + **kwargs + ): + super(HistoryServerGraphResponseData, self).__init__(**kwargs) + self.is_app_finished = is_app_finished + self.jobs = jobs + self.stages = stages + + +class HistoryServerPropertiesResponse(msrest.serialization.Model): + """HistoryServerPropertiesResponse. + + :param web_proxy_endpoint: + :type web_proxy_endpoint: str + """ + + _attribute_map = { + 'web_proxy_endpoint': {'key': 'webProxyEndpoint', 'type': 'str'}, + } + + def __init__( + self, + *, + web_proxy_endpoint: Optional[str] = None, + **kwargs + ): + super(HistoryServerPropertiesResponse, self).__init__(**kwargs) + self.web_proxy_endpoint = web_proxy_endpoint + + +class Jobs(msrest.serialization.Model): + """Jobs. + + :param time: + :type time: int + :param start_time: + :type start_time: int + :param id: + :type id: int + :param edges: + :type edges: list[~azure.synapse.models.Edge] + :param nodes: + :type nodes: list[int] + """ + + _attribute_map = { + 'time': {'key': 'time', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'int'}, + 'id': {'key': 'id', 'type': 'int'}, + 'edges': {'key': 'edges', 'type': '[Edge]'}, + 'nodes': {'key': 'nodes', 'type': '[int]'}, + } + + def __init__( + self, + *, + time: Optional[int] = None, + start_time: Optional[int] = None, + id: Optional[int] = None, + edges: Optional[List["Edge"]] = None, + nodes: Optional[List[int]] = None, + **kwargs + ): + super(Jobs, self).__init__(**kwargs) + self.time = time + self.start_time = start_time + self.id = id + self.edges = edges + self.nodes = nodes + + +class LivyBatchStateInformation(msrest.serialization.Model): + """LivyBatchStateInformation. 
+ + :param not_started_at: + :type not_started_at: ~datetime.datetime + :param starting_at: + :type starting_at: ~datetime.datetime + :param running_at: + :type running_at: ~datetime.datetime + :param dead_at: + :type dead_at: ~datetime.datetime + :param success_at: + :type success_at: ~datetime.datetime + :param killed_at: + :type killed_at: ~datetime.datetime + :param recovering_at: + :type recovering_at: ~datetime.datetime + :param current_state: + :type current_state: str + :param job_creation_request: + :type job_creation_request: ~azure.synapse.models.LivyRequestBase + """ + + _attribute_map = { + 'not_started_at': {'key': 'notStartedAt', 'type': 'iso-8601'}, + 'starting_at': {'key': 'startingAt', 'type': 'iso-8601'}, + 'running_at': {'key': 'runningAt', 'type': 'iso-8601'}, + 'dead_at': {'key': 'deadAt', 'type': 'iso-8601'}, + 'success_at': {'key': 'successAt', 'type': 'iso-8601'}, + 'killed_at': {'key': 'killedAt', 'type': 'iso-8601'}, + 'recovering_at': {'key': 'recoveringAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + 'job_creation_request': {'key': 'jobCreationRequest', 'type': 'LivyRequestBase'}, + } + + def __init__( + self, + *, + not_started_at: Optional[datetime.datetime] = None, + starting_at: Optional[datetime.datetime] = None, + running_at: Optional[datetime.datetime] = None, + dead_at: Optional[datetime.datetime] = None, + success_at: Optional[datetime.datetime] = None, + killed_at: Optional[datetime.datetime] = None, + recovering_at: Optional[datetime.datetime] = None, + current_state: Optional[str] = None, + job_creation_request: Optional["LivyRequestBase"] = None, + **kwargs + ): + super(LivyBatchStateInformation, self).__init__(**kwargs) + self.not_started_at = not_started_at + self.starting_at = starting_at + self.running_at = running_at + self.dead_at = dead_at + self.success_at = success_at + self.killed_at = killed_at + self.recovering_at = recovering_at + self.current_state = current_state + self.job_creation_request = job_creation_request + + +class LivyRequestBase(msrest.serialization.Model): + """LivyRequestBase. + + :param name: + :type name: str + :param file: + :type file: str + :param class_name: + :type class_name: str + :param args: + :type args: list[str] + :param jars: + :type jars: list[str] + :param files: + :type files: list[str] + :param archives: + :type archives: list[str] + :param conf: Dictionary of :code:``. 
+ :type conf: dict[str, str] + :param driver_memory: + :type driver_memory: str + :param driver_cores: + :type driver_cores: int + :param executor_memory: + :type executor_memory: str + :param executor_cores: + :type executor_cores: int + :param num_executors: + :type num_executors: int + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'file': {'key': 'file', 'type': 'str'}, + 'class_name': {'key': 'className', 'type': 'str'}, + 'args': {'key': 'args', 'type': '[str]'}, + 'jars': {'key': 'jars', 'type': '[str]'}, + 'files': {'key': 'files', 'type': '[str]'}, + 'archives': {'key': 'archives', 'type': '[str]'}, + 'conf': {'key': 'conf', 'type': '{str}'}, + 'driver_memory': {'key': 'driverMemory', 'type': 'str'}, + 'driver_cores': {'key': 'driverCores', 'type': 'int'}, + 'executor_memory': {'key': 'executorMemory', 'type': 'str'}, + 'executor_cores': {'key': 'executorCores', 'type': 'int'}, + 'num_executors': {'key': 'numExecutors', 'type': 'int'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + file: Optional[str] = None, + class_name: Optional[str] = None, + args: Optional[List[str]] = None, + jars: Optional[List[str]] = None, + files: Optional[List[str]] = None, + archives: Optional[List[str]] = None, + conf: Optional[Dict[str, str]] = None, + driver_memory: Optional[str] = None, + driver_cores: Optional[int] = None, + executor_memory: Optional[str] = None, + executor_cores: Optional[int] = None, + num_executors: Optional[int] = None, + **kwargs + ): + super(LivyRequestBase, self).__init__(**kwargs) + self.name = name + self.file = file + self.class_name = class_name + self.args = args + self.jars = jars + self.files = files + self.archives = archives + self.conf = conf + self.driver_memory = driver_memory + self.driver_cores = driver_cores + self.executor_memory = executor_memory + self.executor_cores = executor_cores + self.num_executors = num_executors + + +class LivySessionStateInformation(msrest.serialization.Model): + """LivySessionStateInformation. 
+ + :param not_started_at: + :type not_started_at: ~datetime.datetime + :param starting_at: + :type starting_at: ~datetime.datetime + :param idle_at: + :type idle_at: ~datetime.datetime + :param dead_at: + :type dead_at: ~datetime.datetime + :param shutting_down_at: + :type shutting_down_at: ~datetime.datetime + :param killed_at: + :type killed_at: ~datetime.datetime + :param recovering_at: + :type recovering_at: ~datetime.datetime + :param busy_at: + :type busy_at: ~datetime.datetime + :param error_at: + :type error_at: ~datetime.datetime + :param current_state: + :type current_state: str + :param job_creation_request: + :type job_creation_request: ~azure.synapse.models.LivyRequestBase + """ + + _attribute_map = { + 'not_started_at': {'key': 'notStartedAt', 'type': 'iso-8601'}, + 'starting_at': {'key': 'startingAt', 'type': 'iso-8601'}, + 'idle_at': {'key': 'idleAt', 'type': 'iso-8601'}, + 'dead_at': {'key': 'deadAt', 'type': 'iso-8601'}, + 'shutting_down_at': {'key': 'shuttingDownAt', 'type': 'iso-8601'}, + 'killed_at': {'key': 'killedAt', 'type': 'iso-8601'}, + 'recovering_at': {'key': 'recoveringAt', 'type': 'iso-8601'}, + 'busy_at': {'key': 'busyAt', 'type': 'iso-8601'}, + 'error_at': {'key': 'errorAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + 'job_creation_request': {'key': 'jobCreationRequest', 'type': 'LivyRequestBase'}, + } + + def __init__( + self, + *, + not_started_at: Optional[datetime.datetime] = None, + starting_at: Optional[datetime.datetime] = None, + idle_at: Optional[datetime.datetime] = None, + dead_at: Optional[datetime.datetime] = None, + shutting_down_at: Optional[datetime.datetime] = None, + killed_at: Optional[datetime.datetime] = None, + recovering_at: Optional[datetime.datetime] = None, + busy_at: Optional[datetime.datetime] = None, + error_at: Optional[datetime.datetime] = None, + current_state: Optional[str] = None, + job_creation_request: Optional["LivyRequestBase"] = None, + **kwargs + ): + super(LivySessionStateInformation, self).__init__(**kwargs) + self.not_started_at = not_started_at + self.starting_at = starting_at + self.idle_at = idle_at + self.dead_at = dead_at + self.shutting_down_at = shutting_down_at + self.killed_at = killed_at + self.recovering_at = recovering_at + self.busy_at = busy_at + self.error_at = error_at + self.current_state = current_state + self.job_creation_request = job_creation_request + + +class LivyStatementCancellationResponse(msrest.serialization.Model): + """LivyStatementCancellationResponse. + + :param msg: + :type msg: str + """ + + _attribute_map = { + 'msg': {'key': 'msg', 'type': 'str'}, + } + + def __init__( + self, + *, + msg: Optional[str] = None, + **kwargs + ): + super(LivyStatementCancellationResponse, self).__init__(**kwargs) + self.msg = msg + + +class LivyStatementOutput(msrest.serialization.Model): + """LivyStatementOutput. 
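+
+ Usage sketch (editorial note; the values are invented): for a successful
+ statement, ``status`` is 'ok' and ``data`` holds the result; on failure,
+ the ``ename``/``evalue``/``traceback`` fields describe the error.
+
+ >>> from azure.synapse.models import LivyStatementOutput
+ >>> out = LivyStatementOutput(status='error', ename='ValueError',
+ ...                           evalue='bad input', traceback=[])
+ >>> out.ename
+ 'ValueError'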
+ + :param status: + :type status: str + :param execution_count: + :type execution_count: int + :param data: + :type data: ~azure.synapse.models.LivyStatementOutputData + :param ename: + :type ename: str + :param evalue: + :type evalue: str + :param traceback: + :type traceback: list[str] + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'execution_count': {'key': 'execution_count', 'type': 'int'}, + 'data': {'key': 'data', 'type': 'LivyStatementOutputData'}, + 'ename': {'key': 'ename', 'type': 'str'}, + 'evalue': {'key': 'evalue', 'type': 'str'}, + 'traceback': {'key': 'traceback', 'type': '[str]'}, + } + + def __init__( + self, + *, + status: Optional[str] = None, + execution_count: Optional[int] = None, + data: Optional["LivyStatementOutputData"] = None, + ename: Optional[str] = None, + evalue: Optional[str] = None, + traceback: Optional[List[str]] = None, + **kwargs + ): + super(LivyStatementOutput, self).__init__(**kwargs) + self.status = status + self.execution_count = execution_count + self.data = data + self.ename = ename + self.evalue = evalue + self.traceback = traceback + + +class LivyStatementOutputData(msrest.serialization.Model): + """LivyStatementOutputData. + + """ + + _attribute_map = { + } + + def __init__( + self, + **kwargs + ): + super(LivyStatementOutputData, self).__init__(**kwargs) + + +class LivyStatementRequestBody(msrest.serialization.Model): + """LivyStatementRequestBody. + + :param code: + :type code: str + :param kind: + :type kind: str + """ + + _attribute_map = { + 'code': {'key': 'code', 'type': 'str'}, + 'kind': {'key': 'kind', 'type': 'str'}, + } + + def __init__( + self, + *, + code: Optional[str] = None, + kind: Optional[str] = None, + **kwargs + ): + super(LivyStatementRequestBody, self).__init__(**kwargs) + self.code = code + self.kind = kind + + +class LivyStatementResponseBody(msrest.serialization.Model): + """LivyStatementResponseBody. + + :param id: + :type id: int + :param code: + :type code: str + :param state: + :type state: str + :param output: + :type output: ~azure.synapse.models.LivyStatementOutput + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'int'}, + 'code': {'key': 'code', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'output': {'key': 'output', 'type': 'LivyStatementOutput'}, + } + + def __init__( + self, + *, + id: Optional[int] = None, + code: Optional[str] = None, + state: Optional[str] = None, + output: Optional["LivyStatementOutput"] = None, + **kwargs + ): + super(LivyStatementResponseBody, self).__init__(**kwargs) + self.id = id + self.code = code + self.state = state + self.output = output + + +class LivyStatementsResponseBody(msrest.serialization.Model): + """LivyStatementsResponseBody. + + :param total_statements: + :type total_statements: int + :param statements: + :type statements: list[~azure.synapse.models.LivyStatementResponseBody] + """ + + _attribute_map = { + 'total_statements': {'key': 'total_statements', 'type': 'int'}, + 'statements': {'key': 'statements', 'type': '[LivyStatementResponseBody]'}, + } + + def __init__( + self, + *, + total_statements: Optional[int] = None, + statements: Optional[List["LivyStatementResponseBody"]] = None, + **kwargs + ): + super(LivyStatementsResponseBody, self).__init__(**kwargs) + self.total_statements = total_statements + self.statements = statements + + +class Option(msrest.serialization.Model): + """Option. 
+ + :param additional_prop1: + :type additional_prop1: str + :param additional_prop2: + :type additional_prop2: str + :param additional_prop3: + :type additional_prop3: str + """ + + _attribute_map = { + 'additional_prop1': {'key': 'additionalProp1', 'type': 'str'}, + 'additional_prop2': {'key': 'additionalProp2', 'type': 'str'}, + 'additional_prop3': {'key': 'additionalProp3', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_prop1: Optional[str] = None, + additional_prop2: Optional[str] = None, + additional_prop3: Optional[str] = None, + **kwargs + ): + super(Option, self).__init__(**kwargs) + self.additional_prop1 = additional_prop1 + self.additional_prop2 = additional_prop2 + self.additional_prop3 = additional_prop3 + + +class SchedulerInformation(msrest.serialization.Model): + """SchedulerInformation. + + :param submitted_at: + :type submitted_at: ~datetime.datetime + :param scheduled_at: + :type scheduled_at: ~datetime.datetime + :param ended_at: + :type ended_at: ~datetime.datetime + :param cancellation_requested_at: + :type cancellation_requested_at: ~datetime.datetime + :param current_state: Possible values include: 'Queued', 'Scheduled', 'Ended'. + :type current_state: str or ~azure.synapse.models.SchedulerCurrentState + """ + + _attribute_map = { + 'submitted_at': {'key': 'submittedAt', 'type': 'iso-8601'}, + 'scheduled_at': {'key': 'scheduledAt', 'type': 'iso-8601'}, + 'ended_at': {'key': 'endedAt', 'type': 'iso-8601'}, + 'cancellation_requested_at': {'key': 'cancellationRequestedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + *, + submitted_at: Optional[datetime.datetime] = None, + scheduled_at: Optional[datetime.datetime] = None, + ended_at: Optional[datetime.datetime] = None, + cancellation_requested_at: Optional[datetime.datetime] = None, + current_state: Optional[Union[str, "SchedulerCurrentState"]] = None, + **kwargs + ): + super(SchedulerInformation, self).__init__(**kwargs) + self.submitted_at = submitted_at + self.scheduled_at = scheduled_at + self.ended_at = ended_at + self.cancellation_requested_at = cancellation_requested_at + self.current_state = current_state + + +class SetWorkspaceAdministratorsRequest(msrest.serialization.Model): + """SetWorkspaceAdministratorsRequest. + + :param administrators: + :type administrators: list[str] + """ + + _attribute_map = { + 'administrators': {'key': 'administrators', 'type': '[str]'}, + } + + def __init__( + self, + *, + administrators: Optional[List[str]] = None, + **kwargs + ): + super(SetWorkspaceAdministratorsRequest, self).__init__(**kwargs) + self.administrators = administrators + + +class SparkJob(msrest.serialization.Model): + """SparkJob. 
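+
+    A hedged construction sketch (the values are invented; keyword names follow the
+    parameters documented below, and "SparkBatch" is one of the generated ``JobType``
+    enum values)::
+
+        job = SparkJob(state="running", name="job1", job_type="SparkBatch")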
+ + :param state: + :type state: str + :param name: + :type name: str + :param submitter: + :type submitter: str + :param compute: + :type compute: str + :param spark_application_id: + :type spark_application_id: str + :param livy_id: + :type livy_id: str + :param timing: + :type timing: list[str] + :param spark_job_definition: + :type spark_job_definition: str + :param pipeline: + :type pipeline: list[~azure.synapse.models.SparkJob] + :param job_type: + :type job_type: str + :param submit_time: + :type submit_time: ~datetime.datetime + :param end_time: + :type end_time: ~datetime.datetime + :param queued_duration: + :type queued_duration: str + :param running_duration: + :type running_duration: str + :param total_duration: + :type total_duration: str + """ + + _attribute_map = { + 'state': {'key': 'state', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'submitter': {'key': 'submitter', 'type': 'str'}, + 'compute': {'key': 'compute', 'type': 'str'}, + 'spark_application_id': {'key': 'sparkApplicationId', 'type': 'str'}, + 'livy_id': {'key': 'livyId', 'type': 'str'}, + 'timing': {'key': 'timing', 'type': '[str]'}, + 'spark_job_definition': {'key': 'sparkJobDefinition', 'type': 'str'}, + 'pipeline': {'key': 'pipeline', 'type': '[SparkJob]'}, + 'job_type': {'key': 'jobType', 'type': 'str'}, + 'submit_time': {'key': 'submitTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'queued_duration': {'key': 'queuedDuration', 'type': 'str'}, + 'running_duration': {'key': 'runningDuration', 'type': 'str'}, + 'total_duration': {'key': 'totalDuration', 'type': 'str'}, + } + + def __init__( + self, + *, + state: Optional[str] = None, + name: Optional[str] = None, + submitter: Optional[str] = None, + compute: Optional[str] = None, + spark_application_id: Optional[str] = None, + livy_id: Optional[str] = None, + timing: Optional[List[str]] = None, + spark_job_definition: Optional[str] = None, + pipeline: Optional[List["SparkJob"]] = None, + job_type: Optional[str] = None, + submit_time: Optional[datetime.datetime] = None, + end_time: Optional[datetime.datetime] = None, + queued_duration: Optional[str] = None, + running_duration: Optional[str] = None, + total_duration: Optional[str] = None, + **kwargs + ): + super(SparkJob, self).__init__(**kwargs) + self.state = state + self.name = name + self.submitter = submitter + self.compute = compute + self.spark_application_id = spark_application_id + self.livy_id = livy_id + self.timing = timing + self.spark_job_definition = spark_job_definition + self.pipeline = pipeline + self.job_type = job_type + self.submit_time = submit_time + self.end_time = end_time + self.queued_duration = queued_duration + self.running_duration = running_duration + self.total_duration = total_duration + + +class SparkJobListViewResponse(msrest.serialization.Model): + """SparkJobListViewResponse. + + :param n_jobs: + :type n_jobs: int + :param spark_jobs: + :type spark_jobs: list[~azure.synapse.models.SparkJob] + """ + + _attribute_map = { + 'n_jobs': {'key': 'nJobs', 'type': 'int'}, + 'spark_jobs': {'key': 'sparkJobs', 'type': '[SparkJob]'}, + } + + def __init__( + self, + *, + n_jobs: Optional[int] = None, + spark_jobs: Optional[List["SparkJob"]] = None, + **kwargs + ): + super(SparkJobListViewResponse, self).__init__(**kwargs) + self.n_jobs = n_jobs + self.spark_jobs = spark_jobs + + +class SparkServicePluginInformation(msrest.serialization.Model): + """SparkServicePluginInformation. 
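+
+    A minimal sketch; per the type annotation below, ``current_state`` accepts either
+    a plain string or a ``PluginCurrentState`` enum value::
+
+        info = SparkServicePluginInformation(current_state="Monitoring")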
+ + :param preparation_started_at: + :type preparation_started_at: ~datetime.datetime + :param resource_acquisition_started_at: + :type resource_acquisition_started_at: ~datetime.datetime + :param submission_started_at: + :type submission_started_at: ~datetime.datetime + :param monitoring_started_at: + :type monitoring_started_at: ~datetime.datetime + :param cleanup_started_at: + :type cleanup_started_at: ~datetime.datetime + :param current_state: Possible values include: 'Preparation', 'ResourceAcquisition', 'Queued', + 'Submission', 'Monitoring', 'Cleanup', 'Ended'. + :type current_state: str or ~azure.synapse.models.PluginCurrentState + """ + + _attribute_map = { + 'preparation_started_at': {'key': 'preparationStartedAt', 'type': 'iso-8601'}, + 'resource_acquisition_started_at': {'key': 'resourceAcquisitionStartedAt', 'type': 'iso-8601'}, + 'submission_started_at': {'key': 'submissionStartedAt', 'type': 'iso-8601'}, + 'monitoring_started_at': {'key': 'monitoringStartedAt', 'type': 'iso-8601'}, + 'cleanup_started_at': {'key': 'cleanupStartedAt', 'type': 'iso-8601'}, + 'current_state': {'key': 'currentState', 'type': 'str'}, + } + + def __init__( + self, + *, + preparation_started_at: Optional[datetime.datetime] = None, + resource_acquisition_started_at: Optional[datetime.datetime] = None, + submission_started_at: Optional[datetime.datetime] = None, + monitoring_started_at: Optional[datetime.datetime] = None, + cleanup_started_at: Optional[datetime.datetime] = None, + current_state: Optional[Union[str, "PluginCurrentState"]] = None, + **kwargs + ): + super(SparkServicePluginInformation, self).__init__(**kwargs) + self.preparation_started_at = preparation_started_at + self.resource_acquisition_started_at = resource_acquisition_started_at + self.submission_started_at = submission_started_at + self.monitoring_started_at = monitoring_started_at + self.cleanup_started_at = cleanup_started_at + self.current_state = current_state + + +class Stages(msrest.serialization.Model): + """Stages. + + :param data_avg: + :type data_avg: int + :param time_avg: + :type time_avg: int + :param id: + :type id: int + :param attempt_id: + :type attempt_id: int + :param name: + :type name: str + :param data_skew_task: + :type data_skew_task: list[int] + :param time_skew_task: + :type time_skew_task: list[int] + :param tasks: + :type tasks: list[int] + """ + + _attribute_map = { + 'data_avg': {'key': 'dataAvg', 'type': 'int'}, + 'time_avg': {'key': 'timeAvg', 'type': 'int'}, + 'id': {'key': 'id', 'type': 'int'}, + 'attempt_id': {'key': 'attemptId', 'type': 'int'}, + 'name': {'key': 'name', 'type': 'str'}, + 'data_skew_task': {'key': 'dataSkewTask', 'type': '[int]'}, + 'time_skew_task': {'key': 'timeSkewTask', 'type': '[int]'}, + 'tasks': {'key': 'tasks', 'type': '[int]'}, + } + + def __init__( + self, + *, + data_avg: Optional[int] = None, + time_avg: Optional[int] = None, + id: Optional[int] = None, + attempt_id: Optional[int] = None, + name: Optional[str] = None, + data_skew_task: Optional[List[int]] = None, + time_skew_task: Optional[List[int]] = None, + tasks: Optional[List[int]] = None, + **kwargs + ): + super(Stages, self).__init__(**kwargs) + self.data_avg = data_avg + self.time_avg = time_avg + self.id = id + self.attempt_id = attempt_id + self.name = name + self.data_skew_task = data_skew_task + self.time_skew_task = time_skew_task + self.tasks = tasks + + +class Tables(msrest.serialization.Model): + """Tables. 
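+
+    A minimal construction sketch (values are invented; ``options`` takes a list of
+    the ``Option`` model defined above)::
+
+        table = Tables(operation="read", options=[Option(additional_prop1="value")])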
+
+    :param operation:
+    :type operation: str
+    :param options:
+    :type options: list[~azure.synapse.models.Option]
+    """
+
+    _attribute_map = {
+        'operation': {'key': 'operation', 'type': 'str'},
+        'options': {'key': 'options', 'type': '[Option]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        operation: Optional[str] = None,
+        options: Optional[List["Option"]] = None,
+        **kwargs
+    ):
+        super(Tables, self).__init__(**kwargs)
+        self.operation = operation
+        self.options = options
+
+
+class WorkspaceAccessControlResponse(msrest.serialization.Model):
+    """WorkspaceAccessControlResponse.
+
+    :param administrators:
+    :type administrators: list[str]
+    :param etag:
+    :type etag: str
+    :param access_control_list: Dictionary of
+     string arrays.
+    :type access_control_list: dict[str, list[str]]
+    """
+
+    _attribute_map = {
+        'administrators': {'key': 'administrators', 'type': '[str]'},
+        'etag': {'key': 'etag', 'type': 'str'},
+        'access_control_list': {'key': 'accessControlList', 'type': '{[str]}'},
+    }
+
+    def __init__(
+        self,
+        *,
+        administrators: Optional[List[str]] = None,
+        etag: Optional[str] = None,
+        access_control_list: Optional[Dict[str, List[str]]] = None,
+        **kwargs
+    ):
+        super(WorkspaceAccessControlResponse, self).__init__(**kwargs)
+        self.administrators = administrators
+        self.etag = etag
+        self.access_control_list = access_control_list
diff --git a/sdk/synapse/azure-synapse/azure/synapse/models/_synapse_client_enums.py b/sdk/synapse/azure-synapse/azure/synapse/models/_synapse_client_enums.py
new file mode 100644
index 000000000000..d9a39a16bb63
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/models/_synapse_client_enums.py
@@ -0,0 +1,44 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from enum import Enum
+
+class ErrorSource(str, Enum):
+
+    system = "System"
+    user = "User"
+    unknown = "Unknown"
+    dependency = "Dependency"
+
+class JobType(str, Enum):
+
+    spark_batch = "SparkBatch"
+    spark_session = "SparkSession"
+
+class JobResult(str, Enum):
+
+    uncertain = "Uncertain"
+    succeeded = "Succeeded"
+    failed = "Failed"
+    cancelled = "Cancelled"
+
+class SchedulerCurrentState(str, Enum):
+
+    queued = "Queued"
+    scheduled = "Scheduled"
+    ended = "Ended"
+
+class PluginCurrentState(str, Enum):
+
+    preparation = "Preparation"
+    resource_acquisition = "ResourceAcquisition"
+    queued = "Queued"
+    submission = "Submission"
+    monitoring = "Monitoring"
+    cleanup = "Cleanup"
+    ended = "Ended"
diff --git a/sdk/synapse/azure-synapse/azure/synapse/operations/__init__.py b/sdk/synapse/azure-synapse/azure/synapse/operations/__init__.py
new file mode 100644
index 000000000000..93ca320e81e5
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/operations/__init__.py
@@ -0,0 +1,19 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+
+from ._monitoring_operations import MonitoringOperations
+from ._spark_batch_operations import SparkBatchOperations
+from ._spark_session_operations import SparkSessionOperations
+from ._workspace_acl_operations import WorkspaceAclOperations
+
+__all__ = [
+    'MonitoringOperations',
+    'SparkBatchOperations',
+    'SparkSessionOperations',
+    'WorkspaceAclOperations',
+]
diff --git a/sdk/synapse/azure-synapse/azure/synapse/operations/_monitoring_operations.py b/sdk/synapse/azure-synapse/azure/synapse/operations/_monitoring_operations.py
new file mode 100644
index 000000000000..92fee9bc66f1
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/operations/_monitoring_operations.py
@@ -0,0 +1,419 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import HttpResponseError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class MonitoringOperations(object):
+    """MonitoringOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that will create it for you and attach it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.synapse.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def get_history_server_data(
+        self,
+        workspace_name,  # type: str
+        pool_name,  # type: str
+        livy_id,  # type: str
+        app_id,  # type: str
+        attempt_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.HistoryServerDataResponse"
+        """Get History Server Data for a given workspace, pool, livyId, appId and attemptId.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param pool_name: The spark pool name.
+        :type pool_name: str
+        :param livy_id: The livy id.
+        :type livy_id: str
+        :param app_id: The application id.
+        :type app_id: str
+        :param attempt_id: The attempt id.
+        :type attempt_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HistoryServerDataResponse or None or the result of cls(response)
+        :rtype: ~azure.synapse.models.HistoryServerDataResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.HistoryServerDataResponse"]
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_history_server_data.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'poolName': self._serialize.url("pool_name", pool_name, 'str'),
+            'livyId': self._serialize.url("livy_id", livy_id, 'str'),
+            'appId': self._serialize.url("app_id", app_id, 'str'),
+            'attemptId': self._serialize.url("attempt_id", attempt_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('HistoryServerDataResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_history_server_data.metadata = {'url': '/monitoring/workloadTypes/spark/pools/{poolName}/livyIds/{livyId}/applications/{appId}/attemptIds/{attemptId}/historyServerData'}
+
+    def get_spark_job_list(
+        self,
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.SparkJobListViewResponse"
+        """Get the list of spark applications for the workspace.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: SparkJobListViewResponse or None or the result of cls(response)
+        :rtype: ~azure.synapse.models.SparkJobListViewResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkJobListViewResponse"]
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_spark_job_list.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('SparkJobListViewResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_spark_job_list.metadata = {'url': '/monitoring/workloadTypes/spark/Applications'}
+
+    def get_application_details(
+        self,
+        workspace_name,  # type: str
+        pool_name,  # type: str
+        livy_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.SparkJobListViewResponse"
+        """Get the details of a single spark application, given the workspace name, pool name and livyId.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param pool_name: The spark pool name.
+        :type pool_name: str
+        :param livy_id: The livy id.
+        :type livy_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: SparkJobListViewResponse or None or the result of cls(response)
+        :rtype: ~azure.synapse.models.SparkJobListViewResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.SparkJobListViewResponse"]
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_application_details.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'poolName': self._serialize.url("pool_name", pool_name, 'str'),
+            'livyId': self._serialize.url("livy_id", livy_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('SparkJobListViewResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_application_details.metadata = {'url': '/monitoring/workloadTypes/spark/pools/{poolName}/livyIds/{livyId}'}
+
+    def get_history_server_properties(
+        self,
+        workspace_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.HistoryServerPropertiesResponse"
+        """Get History server properties.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HistoryServerPropertiesResponse or None or the result of cls(response)
+        :rtype: ~azure.synapse.models.HistoryServerPropertiesResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.HistoryServerPropertiesResponse"]
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_history_server_properties.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('HistoryServerPropertiesResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_history_server_properties.metadata = {'url': '/monitoring/workloadTypes/spark/historyServerProperties'}
+
+    def get_history_server_diagnostic(
+        self,
+        workspace_name,  # type: str
+        pool_name,  # type: str
+        livy_id,  # type: str
+        app_id,  # type: str
+        attempt_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.HistoryServerDiagnosticResponse"
+        """Get History Server Diagnostic Data for a given workspace, pool, livyId, appId and attemptId.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param pool_name: The spark pool name.
+        :type pool_name: str
+        :param livy_id: The livy id.
+        :type livy_id: str
+        :param app_id: The application id.
+        :type app_id: str
+        :param attempt_id: The attempt id.
+        :type attempt_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HistoryServerDiagnosticResponse or None or the result of cls(response)
+        :rtype: ~azure.synapse.models.HistoryServerDiagnosticResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.HistoryServerDiagnosticResponse"]
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_history_server_diagnostic.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'poolName': self._serialize.url("pool_name", pool_name, 'str'),
+            'livyId': self._serialize.url("livy_id", livy_id, 'str'),
+            'appId': self._serialize.url("app_id", app_id, 'str'),
+            'attemptId': self._serialize.url("attempt_id", attempt_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('HistoryServerDiagnosticResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_history_server_diagnostic.metadata = {'url': '/monitoring/workloadTypes/spark/pools/{poolName}/livyIds/{livyId}/applications/{appId}/attemptIds/{attemptId}/historyServerDiagnostic'}
+
+    def get_history_server_graph(
+        self,
+        workspace_name,  # type: str
+        pool_name,  # type: str
+        livy_id,  # type: str
+        app_id,  # type: str
+        attempt_id,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.HistoryServerGraphResponse"
+        """Get History Server Graph Data for a given workspace, pool, livyId, appId and attemptId.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param pool_name: The spark pool name.
+        :type pool_name: str
+        :param livy_id: The livy id.
+        :type livy_id: str
+        :param app_id: The application id.
+        :type app_id: str
+        :param attempt_id: The attempt id.
+        :type attempt_id: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: HistoryServerGraphResponse or None or the result of cls(response)
+        :rtype: ~azure.synapse.models.HistoryServerGraphResponse or None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.HistoryServerGraphResponse"]
+        error_map = kwargs.pop('error_map', {})
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_history_server_graph.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'poolName': self._serialize.url("pool_name", pool_name, 'str'),
+            'livyId': self._serialize.url("livy_id", livy_id, 'str'),
+            'appId': self._serialize.url("app_id", app_id, 'str'),
+            'attemptId': self._serialize.url("attempt_id", attempt_id, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200, 401]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('HistoryServerGraphResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_history_server_graph.metadata = {'url': '/monitoring/workloadTypes/spark/pools/{poolName}/livyIds/{livyId}/applications/{appId}/attemptIds/{attemptId}/historyServerGraph'}
diff --git a/sdk/synapse/azure-synapse/azure/synapse/operations/_spark_batch_operations.py b/sdk/synapse/azure-synapse/azure/synapse/operations/_spark_batch_operations.py
new file mode 100644
index 000000000000..6e37227065fb
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/operations/_spark_batch_operations.py
@@ -0,0 +1,299 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import HttpResponseError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class SparkBatchOperations(object):
+    """SparkBatchOperations operations.
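+
+    A hedged usage sketch, assuming a configured ``SynapseClient`` that exposes this
+    group as ``client.spark_batch`` (the attribute name is an assumption)::
+
+        batches = client.spark_batch.list("myworkspace", "mypool")
+        client.spark_batch.delete("myworkspace", "mypool", batch_id=1)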
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that will create it for you and attach it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.synapse.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        from_parameter=None,  # type: Optional[int]
+        size=None,  # type: Optional[int]
+        detailed=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ExtendedLivyListBatchResponse"
+        """List all spark batch jobs that are running under a particular spark pool.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param from_parameter: Optional param specifying which index the list should begin from.
+        :type from_parameter: int
+        :param size: Optional param specifying the size of the returned list.
+         By default it is 20 and that is the maximum.
+        :type size: int
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+         plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivyListBatchResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivyListBatchResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExtendedLivyListBatchResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.list.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        if from_parameter is not None:
+            query_parameters['from'] = self._serialize.query("from_parameter", from_parameter, 'int')
+        if size is not None:
+            query_parameters['size'] = self._serialize.query("size", size, 'int')
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivyListBatchResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches'}
+
+    def create(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        livy_request,  # type: "models.ExtendedLivyBatchRequest"
+        detailed=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ExtendedLivyBatchResponse"
+        """Create a new spark batch job.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param livy_request: Livy compatible batch job request payload.
+        :type livy_request: ~azure.synapse.models.ExtendedLivyBatchRequest
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+         plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivyBatchResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivyBatchResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExtendedLivyBatchResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.create.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json'
+
+        # Construct body
+        body_content = self._serialize.body(livy_request, 'ExtendedLivyBatchRequest')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivyBatchResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches'}
+
+    def get(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        batch_id,  # type: int
+        detailed=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ExtendedLivyBatchResponse"
+        """Gets a single spark batch job.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param batch_id: Identifier for the batch job.
+        :type batch_id: int
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+         plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivyBatchResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivyBatchResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExtendedLivyBatchResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.get.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'batchId': self._serialize.url("batch_id", batch_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivyBatchResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches/{batchId}'}
+
+    def delete(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        batch_id,  # type: int
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Cancels a running spark batch job.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param batch_id: Identifier for the batch job.
+        :type batch_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.delete.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'batchId': self._serialize.url("batch_id", batch_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+
+        # Construct headers
+        header_parameters = {}
+
+        # Construct and send request
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches/{batchId}'}
diff --git a/sdk/synapse/azure-synapse/azure/synapse/operations/_spark_session_operations.py b/sdk/synapse/azure-synapse/azure/synapse/operations/_spark_session_operations.py
new file mode 100644
index 000000000000..b7f8bc6152d8
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/operations/_spark_session_operations.py
@@ -0,0 +1,605 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import HttpResponseError, map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class SparkSessionOperations(object):
+    """SparkSessionOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that will create it for you and attach it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.synapse.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
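+
+    A hedged usage sketch, assuming a configured ``SynapseClient`` that exposes this
+    group as ``client.spark_session`` (the attribute name is an assumption)::
+
+        sessions = client.spark_session.list("myworkspace", "mypool")
+        client.spark_session.reset_timeout("myworkspace", "mypool", session_id=1)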
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def list(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        from_parameter=None,  # type: Optional[int]
+        size=None,  # type: Optional[int]
+        detailed=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ExtendedLivyListSessionResponse"
+        """List all spark sessions that are running under a particular spark pool.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param from_parameter: Optional param specifying which index the list should begin from.
+        :type from_parameter: int
+        :param size: Optional param specifying the size of the returned list.
+         By default it is 20 and that is the maximum.
+        :type size: int
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+         plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivyListSessionResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivyListSessionResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExtendedLivyListSessionResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.list.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        if from_parameter is not None:
+            query_parameters['from'] = self._serialize.query("from_parameter", from_parameter, 'int')
+        if size is not None:
+            query_parameters['size'] = self._serialize.query("size", size, 'int')
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivyListSessionResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions'}
+
+    def create(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        livy_request,  # type: "models.ExtendedLivySessionRequest"
+        detailed=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ExtendedLivySessionResponse"
+        """Create a new spark session.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param livy_request: Livy compatible session request payload.
+        :type livy_request: ~azure.synapse.models.ExtendedLivySessionRequest
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+         plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivySessionResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivySessionResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExtendedLivySessionResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.create.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json'
+
+        # Construct body
+        body_content = self._serialize.body(livy_request, 'ExtendedLivySessionRequest')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivySessionResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions'}
+
+    def get(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        session_id,  # type: int
+        detailed=None,  # type: Optional[bool]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.ExtendedLivySessionResponse"
+        """Gets a single spark session.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param session_id: Identifier for the session.
+        :type session_id: int
+        :param detailed: Optional query param specifying whether detailed response is returned beyond
+         plain livy.
+        :type detailed: bool
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: ExtendedLivySessionResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.ExtendedLivySessionResponse
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.ExtendedLivySessionResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.get.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'sessionId': self._serialize.url("session_id", session_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        if detailed is not None:
+            query_parameters['detailed'] = self._serialize.query("detailed", detailed, 'bool')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('ExtendedLivySessionResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}'}
+
+    def delete(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        session_id,  # type: int
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Cancels a running spark session.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param session_id: Identifier for the session.
+        :type session_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.delete.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'sessionId': self._serialize.url("session_id", session_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+
+        # Construct headers
+        header_parameters = {}
+
+        # Construct and send request
+        request = self._client.delete(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    delete.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}'}
+
+    def reset_timeout(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        session_id,  # type: int
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Sends a keep-alive call to the current session to reset the session timeout.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param session_id: Identifier for the session.
+
+    def reset_timeout(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        session_id,  # type: int
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Sends a keep alive call to the current session to reset the session timeout.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param session_id: Identifier for the session.
+        :type session_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.reset_timeout.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'sessionId': self._serialize.url("session_id", session_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+
+        # Construct headers
+        header_parameters = {}
+
+        # Construct and send request
+        request = self._client.put(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    reset_timeout.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/reset-timeout'}
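# A hedged sketch of the two calls above: reset_timeout() keeps a long-running
# session alive and delete() cancels it; both return None on HTTP 200. Client
# construction as in the earlier sketch, with the same assumed names.
from azure.identity import DefaultAzureCredential
from azure.synapse import SynapseClient

client = SynapseClient(DefaultAzureCredential())
client.spark_session.reset_timeout("myworkspace", "mypool", 42)  # keep-alive ping
client.spark_session.delete("myworkspace", "mypool", 42)         # cancel the session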
+
+    def list_statements(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        session_id,  # type: int
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LivyStatementsResponseBody"
+        """Gets a list of statements within a spark session.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param session_id: Identifier for the session.
+        :type session_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LivyStatementsResponseBody or the result of cls(response)
+        :rtype: ~azure.synapse.models.LivyStatementsResponseBody
+        :raises: ~azure.core.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LivyStatementsResponseBody"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.list_statements.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'sessionId': self._serialize.url("session_id", session_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('LivyStatementsResponseBody', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    list_statements.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements'}
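# A sketch for list_statements() above. The shape of LivyStatementsResponseBody
# (a `statements` list whose items carry `id` and `state`) is an assumption
# based on the Livy API; this hunk only shows the model name.
from azure.identity import DefaultAzureCredential
from azure.synapse import SynapseClient

client = SynapseClient(DefaultAzureCredential())
statements = client.spark_session.list_statements("myworkspace", "mypool", 42)
for statement in statements.statements or []:  # assumed field name
    print(statement.id, statement.state)       # assumed field names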
+
+    def create_statement(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        session_id,  # type: int
+        livy_request,  # type: "models.LivyStatementRequestBody"
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LivyStatementResponseBody"
+        """Create statement within a spark session.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param session_id: Identifier for the session.
+        :type session_id: int
+        :param livy_request: Livy compatible batch job request payload.
+        :type livy_request: ~azure.synapse.models.LivyStatementRequestBody
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LivyStatementResponseBody or the result of cls(response)
+        :rtype: ~azure.synapse.models.LivyStatementResponseBody
+        :raises: ~azure.core.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LivyStatementResponseBody"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.create_statement.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'sessionId': self._serialize.url("session_id", session_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json'
+
+        # Construct body
+        body_content = self._serialize.body(livy_request, 'LivyStatementRequestBody')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('LivyStatementResponseBody', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    create_statement.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements'}
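# A sketch for create_statement() above. The `code` and `kind` fields on
# LivyStatementRequestBody are assumptions mirroring the Livy statements API;
# the hunk names the model but does not show its fields.
from azure.identity import DefaultAzureCredential
from azure.synapse import SynapseClient
from azure.synapse.models import LivyStatementRequestBody

client = SynapseClient(DefaultAzureCredential())
request_body = LivyStatementRequestBody(code="1 + 1", kind="pyspark")  # assumed fields
statement = client.spark_session.create_statement("myworkspace", "mypool", 42, request_body)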
+
+    def get_statement(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        session_id,  # type: int
+        statement_id,  # type: int
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LivyStatementResponseBody"
+        """Gets a single statement within a spark session.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param session_id: Identifier for the session.
+        :type session_id: int
+        :param statement_id: Identifier for the statement.
+        :type statement_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LivyStatementResponseBody or the result of cls(response)
+        :rtype: ~azure.synapse.models.LivyStatementResponseBody
+        :raises: ~azure.core.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LivyStatementResponseBody"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.get_statement.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'sessionId': self._serialize.url("session_id", session_id, 'int'),
+            'statementId': self._serialize.url("statement_id", statement_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.get(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('LivyStatementResponseBody', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_statement.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}'}
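# A sketch for get_statement() above and delete_statement() just below. Note
# from the metadata URL that delete_statement() posts to
# .../statements/{statementId}/cancel, so it cancels the statement rather than
# deleting a record; the `msg` attribute is an assumption from the Livy API.
from azure.identity import DefaultAzureCredential
from azure.synapse import SynapseClient

client = SynapseClient(DefaultAzureCredential())
statement = client.spark_session.get_statement("myworkspace", "mypool", 42, statement_id=7)
cancellation = client.spark_session.delete_statement("myworkspace", "mypool", 42, statement_id=7)
print(cancellation.msg)  # assumed field on LivyStatementCancellationResponse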
+
+    def delete_statement(
+        self,
+        workspace_name,  # type: str
+        spark_pool_name,  # type: str
+        session_id,  # type: int
+        statement_id,  # type: int
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.LivyStatementCancellationResponse"
+        """Kill a statement within a session.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param spark_pool_name: Name of the spark pool. "ondemand" targets the ondemand pool.
+        :type spark_pool_name: str
+        :param session_id: Identifier for the session.
+        :type session_id: int
+        :param statement_id: Identifier for the statement.
+        :type statement_id: int
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: LivyStatementCancellationResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.LivyStatementCancellationResponse
+        :raises: ~azure.core.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.LivyStatementCancellationResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        # Construct URL
+        url = self.delete_statement.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+            'livyApiVersion': self._serialize.url("self._config.livy_api_version", self._config.livy_api_version, 'str', skip_quote=True),
+            'sparkPoolName': self._serialize.url("spark_pool_name", spark_pool_name, 'str'),
+            'sessionId': self._serialize.url("session_id", session_id, 'int'),
+            'statementId': self._serialize.url("statement_id", statement_id, 'int'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response)
+
+        deserialized = self._deserialize('LivyStatementCancellationResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    delete_statement.metadata = {'url': '/livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/sessions/{sessionId}/statements/{statementId}/cancel'}
diff --git a/sdk/synapse/azure-synapse/azure/synapse/operations/_workspace_acl_operations.py b/sdk/synapse/azure-synapse/azure/synapse/operations/_workspace_acl_operations.py
new file mode 100644
index 000000000000..8fc643a6999f
--- /dev/null
+++ b/sdk/synapse/azure-synapse/azure/synapse/operations/_workspace_acl_operations.py
@@ -0,0 +1,161 @@
+# coding=utf-8
+# --------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for license information.
+# Code generated by Microsoft (R) AutoRest Code Generator.
+# Changes may cause incorrect behavior and will be lost if the code is regenerated.
+# --------------------------------------------------------------------------
+from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
+import warnings
+
+from azure.core.exceptions import map_error
+from azure.core.pipeline import PipelineResponse
+from azure.core.pipeline.transport import HttpRequest, HttpResponse
+
+from .. import models
+
+T = TypeVar('T')
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+
+class WorkspaceAclOperations(object):
+    """WorkspaceAclOperations operations.
+
+    You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute.
+
+    :ivar models: Alias to model classes used in this operation group.
+    :type models: ~azure.synapse.models
+    :param client: Client for service requests.
+    :param config: Configuration of service client.
+    :param serializer: An object model serializer.
+    :param deserializer: An object model deserializer.
+    """
+
+    models = models
+
+    def __init__(self, client, config, serializer, deserializer):
+        self._client = client
+        self._serialize = serializer
+        self._deserialize = deserializer
+        self._config = config
+
+    def get_access_control_info(
+        self,
+        workspace_name,  # type: str
+        artifact_name=None,  # type: Optional[str]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.WorkspaceAccessControlResponse"
+        """Get access control info.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param artifact_name:
+        :type artifact_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: WorkspaceAccessControlResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.WorkspaceAccessControlResponse
+        :raises: ~azure.synapse.models.ErrorResponseException
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceAccessControlResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        _resource = models.GetAccessControlInfoRequest(artifact_name=artifact_name)
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.get_access_control_info.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json'
+
+        # Construct body
+        body_content = self._serialize.body(_resource, 'GetAccessControlInfoRequest')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise models.ErrorResponseException.from_response(response, self._deserialize)
+
+        deserialized = self._deserialize('WorkspaceAccessControlResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_access_control_info.metadata = {'url': '/getAccessControl'}
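# A sketch for get_access_control_info() above and set_workspace_administrators()
# defined just below. The `workspace_acl` attribute name is inferred from the
# operation class name; passing artifact_name presumably scopes the query to a
# single artifact. Since set_workspace_administrators() replaces the whole
# administrator list, include any existing admins you want to keep.
from azure.identity import DefaultAzureCredential
from azure.synapse import SynapseClient

client = SynapseClient(DefaultAzureCredential())
acl = client.workspace_acl.get_access_control_info("myworkspace")
acl_for_artifact = client.workspace_acl.get_access_control_info(
    "myworkspace", artifact_name="myartifact"  # hypothetical artifact
)
updated = client.workspace_acl.set_workspace_administrators(
    "myworkspace", administrators=["admin1@contoso.com", "admin2@contoso.com"]
)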
+
+    def set_workspace_administrators(
+        self,
+        workspace_name,  # type: str
+        administrators=None,  # type: Optional[List[str]]
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> "models.WorkspaceAccessControlResponse"
+        """Replace the admins of the workspace.
+
+        :param workspace_name: The name of the workspace to execute operations on.
+        :type workspace_name: str
+        :param administrators:
+        :type administrators: list[str]
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: WorkspaceAccessControlResponse or the result of cls(response)
+        :rtype: ~azure.synapse.models.WorkspaceAccessControlResponse
+        :raises: ~azure.synapse.models.ErrorResponseException
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType["models.WorkspaceAccessControlResponse"]
+        error_map = kwargs.pop('error_map', {})
+
+        _request = models.SetWorkspaceAdministratorsRequest(administrators=administrators)
+        api_version = "2019-11-01-preview"
+
+        # Construct URL
+        url = self.set_workspace_administrators.metadata['url']
+        path_format_arguments = {
+            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str', skip_quote=True),
+            'SynapseDnsSuffix': self._serialize.url("self._config.synapse_dns_suffix", self._config.synapse_dns_suffix, 'str', skip_quote=True),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json'
+
+        # Construct body
+        body_content = self._serialize.body(_request, 'SetWorkspaceAdministratorsRequest')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise models.ErrorResponseException.from_response(response, self._deserialize)
+
+        deserialized = self._deserialize('WorkspaceAccessControlResponse', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    set_workspace_administrators.metadata = {'url': '/setWorkspaceAdmins'}
diff --git a/sdk/synapse/azure-synapse/sdk_packaging.toml b/sdk/synapse/azure-synapse/sdk_packaging.toml
new file mode 100644
index 000000000000..f102f12cdb51
--- /dev/null
+++ b/sdk/synapse/azure-synapse/sdk_packaging.toml
@@ -0,0 +1,9 @@
+[packaging]
+package_name = "azure-synapse"
+package_nspkg = "azure-nspkg"
+package_pprint_name = "Synapse"
+package_doc_id = ""
+is_stable = false
+is_arm = false
+need_msrestazure = false
+auto_update = false
diff --git a/sdk/synapse/azure-synapse/setup.cfg b/sdk/synapse/azure-synapse/setup.cfg
new file mode 100644
index 000000000000..3c6e79cf31da
--- /dev/null
+++ b/sdk/synapse/azure-synapse/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal=1
diff --git a/sdk/synapse/azure-synapse/setup.py b/sdk/synapse/azure-synapse/setup.py
new file mode 100644
index 000000000000..3e256badfb32
--- /dev/null
+++ b/sdk/synapse/azure-synapse/setup.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+
+#-------------------------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License. See License.txt in the project root for
+# license information.
+#-------------------------------------------------------------------------- + +import re +import os.path +from io import open +from setuptools import find_packages, setup + +# Change the PACKAGE_NAME only to change folder and different name +PACKAGE_NAME = "azure-synapse" +PACKAGE_PPRINT_NAME = "Synapse" + +# a-b-c => a/b/c +package_folder_path = PACKAGE_NAME.replace('-', '/') +# a-b-c => a.b.c +namespace_name = PACKAGE_NAME.replace('-', '.') + +# azure v0.x is not compatible with this package +# azure v0.x used to have a __version__ attribute (newer versions don't) +try: + import azure + try: + ver = azure.__version__ + raise Exception( + 'This package is incompatible with azure=={}. '.format(ver) + + 'Uninstall it with "pip uninstall azure".' + ) + except AttributeError: + pass +except ImportError: + pass + +# Version extraction inspired from 'requests' +with open(os.path.join(package_folder_path, '_version.py'), 'r') as fd: + version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]', + fd.read(), re.MULTILINE).group(1) + +if not version: + raise RuntimeError('Cannot find version information') + +with open('README.md', encoding='utf-8') as f: + readme = f.read() +with open('CHANGELOG.md', encoding='utf-8') as f: + changelog = f.read() + +setup( + name=PACKAGE_NAME, + version=version, + description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME), + long_description=readme + '\n\n' + changelog, + long_description_content_type='text/markdown', + license='MIT License', + author='Microsoft Corporation', + author_email='azpysdkhelp@microsoft.com', + url='https://github.com/Azure/azure-sdk-for-python', + classifiers=[ + 'Development Status :: 4 - Beta', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'License :: OSI Approved :: MIT License', + ], + zip_safe=False, + packages=find_packages(exclude=[ + 'tests', + # Exclude packages that will be covered by PEP420 or nspkg + 'azure', + ]), + install_requires=[ + 'msrest>=0.5.0', + "azure-core<2.0.0,>=1.2.2", + ], + extras_require={ + ":python_version<'3.0'": ['azure-nspkg'], + } +) From d195be541077d91e6f2cf8967f53836ce4a37343 Mon Sep 17 00:00:00 2001 From: Laurent Mazuel Date: Tue, 18 Feb 2020 13:44:11 -0800 Subject: [PATCH 2/3] Fix empty model generation --- .../azure/synapse/models/__init__.py | 3 --- .../azure/synapse/models/_models.py | 19 ++--------------- .../azure/synapse/models/_models_py3.py | 21 +++---------------- 3 files changed, 5 insertions(+), 38 deletions(-) diff --git a/sdk/synapse/azure-synapse/azure/synapse/models/__init__.py b/sdk/synapse/azure-synapse/azure/synapse/models/__init__.py index 89b7cd50b3f7..98361330420e 100644 --- a/sdk/synapse/azure-synapse/azure/synapse/models/__init__.py +++ b/sdk/synapse/azure-synapse/azure/synapse/models/__init__.py @@ -33,7 +33,6 @@ from ._models_py3 import LivySessionStateInformation from ._models_py3 import LivyStatementCancellationResponse from ._models_py3 import LivyStatementOutput - from ._models_py3 import LivyStatementOutputData from ._models_py3 import LivyStatementRequestBody from ._models_py3 import LivyStatementResponseBody from ._models_py3 import LivyStatementsResponseBody @@ -73,7 +72,6 @@ from ._models import LivySessionStateInformation # type: ignore from ._models 
import LivyStatementCancellationResponse # type: ignore from ._models import LivyStatementOutput # type: ignore - from ._models import LivyStatementOutputData # type: ignore from ._models import LivyStatementRequestBody # type: ignore from ._models import LivyStatementResponseBody # type: ignore from ._models import LivyStatementsResponseBody # type: ignore @@ -121,7 +119,6 @@ 'LivySessionStateInformation', 'LivyStatementCancellationResponse', 'LivyStatementOutput', - 'LivyStatementOutputData', 'LivyStatementRequestBody', 'LivyStatementResponseBody', 'LivyStatementsResponseBody', diff --git a/sdk/synapse/azure-synapse/azure/synapse/models/_models.py b/sdk/synapse/azure-synapse/azure/synapse/models/_models.py index f6ddce541a67..e363a1f5919f 100644 --- a/sdk/synapse/azure-synapse/azure/synapse/models/_models.py +++ b/sdk/synapse/azure-synapse/azure/synapse/models/_models.py @@ -1028,7 +1028,7 @@ class LivyStatementOutput(msrest.serialization.Model): :param execution_count: :type execution_count: int :param data: - :type data: ~azure.synapse.models.LivyStatementOutputData + :type data: object :param ename: :type ename: str :param evalue: @@ -1040,7 +1040,7 @@ class LivyStatementOutput(msrest.serialization.Model): _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'execution_count': {'key': 'execution_count', 'type': 'int'}, - 'data': {'key': 'data', 'type': 'LivyStatementOutputData'}, + 'data': {'key': 'data', 'type': 'object'}, 'ename': {'key': 'ename', 'type': 'str'}, 'evalue': {'key': 'evalue', 'type': 'str'}, 'traceback': {'key': 'traceback', 'type': '[str]'}, @@ -1059,21 +1059,6 @@ def __init__( self.traceback = kwargs.get('traceback', None) -class LivyStatementOutputData(msrest.serialization.Model): - """LivyStatementOutputData. - - """ - - _attribute_map = { - } - - def __init__( - self, - **kwargs - ): - super(LivyStatementOutputData, self).__init__(**kwargs) - - class LivyStatementRequestBody(msrest.serialization.Model): """LivyStatementRequestBody. diff --git a/sdk/synapse/azure-synapse/azure/synapse/models/_models_py3.py b/sdk/synapse/azure-synapse/azure/synapse/models/_models_py3.py index 9d9c28a5a74e..b157a54576ae 100644 --- a/sdk/synapse/azure-synapse/azure/synapse/models/_models_py3.py +++ b/sdk/synapse/azure-synapse/azure/synapse/models/_models_py3.py @@ -1202,7 +1202,7 @@ class LivyStatementOutput(msrest.serialization.Model): :param execution_count: :type execution_count: int :param data: - :type data: ~azure.synapse.models.LivyStatementOutputData + :type data: object :param ename: :type ename: str :param evalue: @@ -1214,7 +1214,7 @@ class LivyStatementOutput(msrest.serialization.Model): _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'execution_count': {'key': 'execution_count', 'type': 'int'}, - 'data': {'key': 'data', 'type': 'LivyStatementOutputData'}, + 'data': {'key': 'data', 'type': 'object'}, 'ename': {'key': 'ename', 'type': 'str'}, 'evalue': {'key': 'evalue', 'type': 'str'}, 'traceback': {'key': 'traceback', 'type': '[str]'}, @@ -1225,7 +1225,7 @@ def __init__( *, status: Optional[str] = None, execution_count: Optional[int] = None, - data: Optional["LivyStatementOutputData"] = None, + data: Optional[object] = None, ename: Optional[str] = None, evalue: Optional[str] = None, traceback: Optional[List[str]] = None, @@ -1240,21 +1240,6 @@ def __init__( self.traceback = traceback -class LivyStatementOutputData(msrest.serialization.Model): - """LivyStatementOutputData. 
- - """ - - _attribute_map = { - } - - def __init__( - self, - **kwargs - ): - super(LivyStatementOutputData, self).__init__(**kwargs) - - class LivyStatementRequestBody(msrest.serialization.Model): """LivyStatementRequestBody. From d424eb8e6a8f3926b07f9ced6f0824563ef4fbb1 Mon Sep 17 00:00:00 2001 From: Zhenyu Zhou Date: Mon, 17 Aug 2020 17:25:11 +0800 Subject: [PATCH 3/3] Skip test for releasing --- sdk/hdinsight/azure-mgmt-hdinsight/test/test_mgmt_hdinsight.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/hdinsight/azure-mgmt-hdinsight/test/test_mgmt_hdinsight.py b/sdk/hdinsight/azure-mgmt-hdinsight/test/test_mgmt_hdinsight.py index 0a9e312e43ec..47a77e84ead7 100644 --- a/sdk/hdinsight/azure-mgmt-hdinsight/test/test_mgmt_hdinsight.py +++ b/sdk/hdinsight/azure-mgmt-hdinsight/test/test_mgmt_hdinsight.py @@ -137,7 +137,7 @@ def test_create_kafka_cluster_with_managed_disks(self, resource_group, location, cluster = create_poller.result() self.validate_cluster(cluster_name, create_params, cluster) - # @unittest.skip('skipping temporarily to unblock azure-keyvault checkin') + @unittest.skip('skipping temporarily to unblock azure-keyvault checkin') @ResourceGroupPreparer(name_prefix='hdipy-', location=LOCATION) @StorageAccountPreparer(name_prefix='hdipy', location=LOCATION) @KeyVaultPreparer(name_prefix='hdipy', location=LOCATION, enable_soft_delete=True)