diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index d947b8055c7b..bf9b1bbc20e8 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -283,6 +283,7 @@
     from ._models_py3 import IntegrationRuntimeComputeProperties
     from ._models_py3 import IntegrationRuntimeConnectionInfo
     from ._models_py3 import IntegrationRuntimeCustomSetupScriptProperties
+    from ._models_py3 import IntegrationRuntimeDataFlowProperties
     from ._models_py3 import IntegrationRuntimeDataProxyProperties
     from ._models_py3 import IntegrationRuntimeMonitoringData
     from ._models_py3 import IntegrationRuntimeNodeIpAddress
@@ -833,6 +834,7 @@
     from ._models import IntegrationRuntimeComputeProperties
     from ._models import IntegrationRuntimeConnectionInfo
     from ._models import IntegrationRuntimeCustomSetupScriptProperties
+    from ._models import IntegrationRuntimeDataFlowProperties
     from ._models import IntegrationRuntimeDataProxyProperties
     from ._models import IntegrationRuntimeMonitoringData
     from ._models import IntegrationRuntimeNodeIpAddress
@@ -1191,6 +1193,7 @@
     IntegrationRuntimeSsisCatalogPricingTier,
     IntegrationRuntimeLicenseType,
     IntegrationRuntimeEdition,
+    DataFlowComputeType,
     SsisObjectMetadataType,
     IntegrationRuntimeAuthKeyName,
 )
@@ -1469,6 +1472,7 @@
     'IntegrationRuntimeComputeProperties',
     'IntegrationRuntimeConnectionInfo',
     'IntegrationRuntimeCustomSetupScriptProperties',
+    'IntegrationRuntimeDataFlowProperties',
     'IntegrationRuntimeDataProxyProperties',
     'IntegrationRuntimeMonitoringData',
     'IntegrationRuntimeNodeIpAddress',
@@ -1826,6 +1830,7 @@
     'IntegrationRuntimeSsisCatalogPricingTier',
     'IntegrationRuntimeLicenseType',
     'IntegrationRuntimeEdition',
+    'DataFlowComputeType',
     'SsisObjectMetadataType',
     'IntegrationRuntimeAuthKeyName',
 ]
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
index 052712638b66..e3abf23ceae4 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py
@@ -552,6 +552,13 @@ class IntegrationRuntimeEdition(str, Enum):
     enterprise = "Enterprise"
 
 
+class DataFlowComputeType(str, Enum):
+
+    general = "General"
+    memory_optimized = "MemoryOptimized"
+    compute_optimized = "ComputeOptimized"
+
+
 class SsisObjectMetadataType(str, Enum):
 
     folder = "Folder"
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
index 15a9a5bb6b36..695779fde22d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
@@ -16059,6 +16059,10 @@ class IntegrationRuntimeComputeProperties(Model):
     :param max_parallel_executions_per_node: Maximum parallel executions
      count per node for managed integration runtime.
     :type max_parallel_executions_per_node: int
+    :param data_flow_properties: Data flow properties for managed integration
+     runtime.
+    :type data_flow_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties
     :param v_net_properties: VNet properties for managed integration runtime.
     :type v_net_properties:
      ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties
@@ -16075,6 +16079,7 @@
         'node_size': {'key': 'nodeSize', 'type': 'str'},
         'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
         'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
+        'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'},
         'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
     }
 
@@ -16085,6 +16090,7 @@ def __init__(self, **kwargs):
         self.node_size = kwargs.get('node_size', None)
         self.number_of_nodes = kwargs.get('number_of_nodes', None)
         self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None)
+        self.data_flow_properties = kwargs.get('data_flow_properties', None)
         self.v_net_properties = kwargs.get('v_net_properties', None)
 
 
@@ -16167,6 +16173,44 @@ def __init__(self, **kwargs):
         self.sas_token = kwargs.get('sas_token', None)
 
 
+class IntegrationRuntimeDataFlowProperties(Model):
+    """Data flow properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param compute_type: Compute type of the cluster which will execute the
+     data flow job. Possible values include: 'General', 'MemoryOptimized',
+     'ComputeOptimized'
+    :type compute_type: str or
+     ~azure.mgmt.datafactory.models.DataFlowComputeType
+    :param core_count: Core count of the cluster which will execute the data
+     flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
+    :type core_count: int
+    :param time_to_live: Time to live (in minutes) setting of the cluster
+     which will execute the data flow job.
+    :type time_to_live: int
+    """
+
+    _validation = {
+        'time_to_live': {'minimum': 0},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'core_count': {'key': 'coreCount', 'type': 'int'},
+        'time_to_live': {'key': 'timeToLive', 'type': 'int'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.compute_type = kwargs.get('compute_type', None)
+        self.core_count = kwargs.get('core_count', None)
+        self.time_to_live = kwargs.get('time_to_live', None)
+
+
 class IntegrationRuntimeDataProxyProperties(Model):
     """Data proxy properties for a managed dedicated integration runtime.
 
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
index 76463e2366dc..0f19bb4915c9 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
@@ -16059,6 +16059,10 @@ class IntegrationRuntimeComputeProperties(Model):
     :param max_parallel_executions_per_node: Maximum parallel executions
      count per node for managed integration runtime.
     :type max_parallel_executions_per_node: int
+    :param data_flow_properties: Data flow properties for managed integration
+     runtime.
+    :type data_flow_properties:
+     ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties
     :param v_net_properties: VNet properties for managed integration runtime.
     :type v_net_properties:
      ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties
@@ -16075,16 +16079,18 @@
         'node_size': {'key': 'nodeSize', 'type': 'str'},
         'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
         'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
+        'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'},
         'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
     }
 
-    def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None:
+    def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, data_flow_properties=None, v_net_properties=None, **kwargs) -> None:
         super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs)
         self.additional_properties = additional_properties
         self.location = location
         self.node_size = node_size
         self.number_of_nodes = number_of_nodes
         self.max_parallel_executions_per_node = max_parallel_executions_per_node
+        self.data_flow_properties = data_flow_properties
         self.v_net_properties = v_net_properties
 
 
@@ -16167,6 +16173,44 @@ def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) -> None:
         self.sas_token = sas_token
 
 
+class IntegrationRuntimeDataFlowProperties(Model):
+    """Data flow properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection.
+    :type additional_properties: dict[str, object]
+    :param compute_type: Compute type of the cluster which will execute the
+     data flow job. Possible values include: 'General', 'MemoryOptimized',
+     'ComputeOptimized'
+    :type compute_type: str or
+     ~azure.mgmt.datafactory.models.DataFlowComputeType
+    :param core_count: Core count of the cluster which will execute the data
+     flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
+    :type core_count: int
+    :param time_to_live: Time to live (in minutes) setting of the cluster
+     which will execute the data flow job.
+    :type time_to_live: int
+    """
+
+    _validation = {
+        'time_to_live': {'minimum': 0},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'core_count': {'key': 'coreCount', 'type': 'int'},
+        'time_to_live': {'key': 'timeToLive', 'type': 'int'},
+    }
+
+    def __init__(self, *, additional_properties=None, compute_type=None, core_count: int=None, time_to_live: int=None, **kwargs) -> None:
+        super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.compute_type = compute_type
+        self.core_count = core_count
+        self.time_to_live = time_to_live
+
+
 class IntegrationRuntimeDataProxyProperties(Model):
     """Data proxy properties for a managed dedicated integration runtime.
 
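Taken together, the model changes above let callers opt a managed integration runtime into data flow execution. A minimal sketch of the intended usage follows; the location, core count, and TTL values are illustrative only, and everything else comes from the constructors added in this diff:

```python
from azure.mgmt.datafactory.models import (
    DataFlowComputeType,
    IntegrationRuntimeComputeProperties,
    IntegrationRuntimeDataFlowProperties,
)

# Illustrative values; per the docstring, core_count must be one of
# 8, 16, 32, 48, 80, 144, 272, and time_to_live (minutes) is validated
# as >= 0 by the new _validation map.
data_flow_properties = IntegrationRuntimeDataFlowProperties(
    compute_type=DataFlowComputeType.memory_optimized,
    core_count=16,
    time_to_live=30,
)

# The new keyword argument slots into the existing compute properties model.
compute_properties = IntegrationRuntimeComputeProperties(
    location='East US',
    data_flow_properties=data_flow_properties,
)
```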
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py
index 8281c8a22a5d..c46f24701eb0 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py
@@ -12,6 +12,8 @@
 import uuid
 from msrest.pipeline import ClientRawResponse
 from msrestazure.azure_exceptions import CloudError
+from msrest.polling import LROPoller, NoPolling
+from msrestazure.polling.arm_polling import ARMPolling
 
 from .. import models
 
@@ -39,29 +41,9 @@ def __init__(self, client, config, serializer, deserializer):
 
         self.config = config
 
-    def create(
-            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config):
-        """Creates a data flow debug session.
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param request: Data flow debug session definition
-        :type request:
-         ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest
-        :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :param operation_config: :ref:`Operation configuration
-         overrides<msrest:optionsforoperations>`.
-        :return: CreateDataFlowDebugSessionResponse or ClientRawResponse if
-         raw=true
-        :rtype:
-         ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse or
-         ~msrest.pipeline.ClientRawResponse
-        :raises: :class:`CloudError`
-        """
+    def _create_initial(
+            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config):
         # Construct URL
         url = self.create.metadata['url']
         path_format_arguments = {
@@ -98,8 +80,9 @@ def create(
             exp.request_id = response.headers.get('x-ms-request-id')
             raise exp
 
-        header_dict = {}
         deserialized = None
+        header_dict = {}
+
         if response.status_code == 200:
             deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', response)
             header_dict = {
@@ -112,6 +95,61 @@
             return client_raw_response
 
         return deserialized
+
+    def create(
+            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, polling=True, **operation_config):
+        """Creates a data flow debug session.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param request: Data flow debug session definition
+        :type request:
+         ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns
+         CreateDataFlowDebugSessionResponse or
+         ClientRawResponse if raw==True
+        :rtype:
+         ~msrest.polling.LROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse]
+         or
+         ~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse]]
+        :raises: :class:`CloudError`
+        """
+        raw_result = self._create_initial(
+            resource_group_name=resource_group_name,
+            factory_name=factory_name,
+            request=request,
+            custom_headers=custom_headers,
+            raw=True,
+            **operation_config
+        )
+
+        def get_long_running_output(response):
+            header_dict = {
+                'location': 'str',
+            }
+            deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', response)
+
+            if raw:
+                client_raw_response = ClientRawResponse(deserialized, response)
+                client_raw_response.add_headers(header_dict)
+                return client_raw_response
+
+            return deserialized
+
+        lro_delay = operation_config.get(
+            'long_running_operation_timeout',
+            self.config.long_running_operation_timeout)
+        if polling is True:
+            polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False:
+            polling_method = NoPolling()
+        else:
+            polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'}
 
     def query_by_factory(
@@ -316,27 +354,9 @@ def delete(
             return client_raw_response
     delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'}
 
-    def execute_command(
-            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config):
-        """Execute a data flow debug command.
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param request: Data flow debug command definition.
-        :type request:
-         ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest
-        :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :param operation_config: :ref:`Operation configuration
-         overrides<msrest:optionsforoperations>`.
-        :return: DataFlowDebugCommandResponse or ClientRawResponse if raw=true
-        :rtype: ~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse or
-         ~msrest.pipeline.ClientRawResponse
-        :raises: :class:`CloudError`
-        """
+    def _execute_command_initial(
+            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config):
         # Construct URL
         url = self.execute_command.metadata['url']
         path_format_arguments = {
@@ -373,8 +393,9 @@ def execute_command(
             exp.request_id = response.headers.get('x-ms-request-id')
             raise exp
 
-        header_dict = {}
         deserialized = None
+        header_dict = {}
+
         if response.status_code == 200:
             deserialized = self._deserialize('DataFlowDebugCommandResponse', response)
             header_dict = {
@@ -387,4 +408,59 @@
             return client_raw_response
 
         return deserialized
+
+    def execute_command(
+            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, polling=True, **operation_config):
+        """Execute a data flow debug command.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param request: Data flow debug command definition.
+        :type request:
+         ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns
+         DataFlowDebugCommandResponse or
+         ClientRawResponse if raw==True
+        :rtype:
+         ~msrest.polling.LROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse]
+         or
+         ~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse]]
+        :raises: :class:`CloudError`
+        """
+        raw_result = self._execute_command_initial(
+            resource_group_name=resource_group_name,
+            factory_name=factory_name,
+            request=request,
+            custom_headers=custom_headers,
+            raw=True,
+            **operation_config
+        )
+
+        def get_long_running_output(response):
+            header_dict = {
+                'location': 'str',
+            }
+            deserialized = self._deserialize('DataFlowDebugCommandResponse', response)
+
+            if raw:
+                client_raw_response = ClientRawResponse(deserialized, response)
+                client_raw_response.add_headers(header_dict)
+                return client_raw_response
+
+            return deserialized
+
+        lro_delay = operation_config.get(
+            'long_running_operation_timeout',
+            self.config.long_running_operation_timeout)
+        if polling is True:
+            polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False:
+            polling_method = NoPolling()
+        else:
+            polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'}
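Because create and execute_command now return pollers instead of the deserialized responses, existing synchronous callers must drain the poller. A sketch of the new calling convention, assuming an already-authenticated DataFactoryManagementClient; the resource group and factory names are placeholders, and the request fields come from the existing CreateDataFlowDebugSessionRequest model rather than this diff:

```python
from azure.mgmt.datafactory.models import CreateDataFlowDebugSessionRequest

# client = DataFactoryManagementClient(credentials, subscription_id)  # assumed configured

request = CreateDataFlowDebugSessionRequest(
    compute_type='General',  # mirrors the DataFlowComputeType values
    core_count=8,
    time_to_live=60,
)

# create() now returns an LROPoller; .result() blocks until the debug
# session is provisioned and yields CreateDataFlowDebugSessionResponse.
poller = client.data_flow_debug_session.create(
    'example-rg', 'example-factory', request)
response = poller.result()

# Pass polling=False to skip ARM polling, or a polling object to
# supply a custom strategy. The same pattern applies to
# execute_command, whose poller yields DataFlowDebugCommandResponse.
```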