[File 1 of 4: models package __init__ (import and __all__ lists)]
@@ -1329,7 +1329,6 @@
     DayOfWeek,
     DaysOfWeek,
     RecurrenceFrequency,
-    DataFlowComputeType,
     AzureFunctionActivityMethod,
     WebActivityMethod,
     OraclePartitionOption,
@@ -1359,6 +1358,7 @@
     IntegrationRuntimeSsisCatalogPricingTier,
     IntegrationRuntimeLicenseType,
     IntegrationRuntimeEdition,
+    DataFlowComputeType,
     SsisObjectMetadataType,
     IntegrationRuntimeAuthKeyName,
     CopyBehaviorType,
@@ -2053,7 +2053,6 @@
     'DayOfWeek',
     'DaysOfWeek',
     'RecurrenceFrequency',
-    'DataFlowComputeType',
     'AzureFunctionActivityMethod',
     'WebActivityMethod',
     'OraclePartitionOption',
@@ -2083,6 +2082,7 @@
     'IntegrationRuntimeSsisCatalogPricingTier',
     'IntegrationRuntimeLicenseType',
     'IntegrationRuntimeEdition',
+    'DataFlowComputeType',
     'SsisObjectMetadataType',
     'IntegrationRuntimeAuthKeyName',
     'CopyBehaviorType',
[File 2 of 4: enums module]
@@ -384,13 +384,6 @@ class RecurrenceFrequency(str, Enum):
     year = "Year"


-class DataFlowComputeType(str, Enum):
-
-    general = "General"
-    memory_optimized = "MemoryOptimized"
-    compute_optimized = "ComputeOptimized"
-
-
 class AzureFunctionActivityMethod(str, Enum):

     get = "GET"
@@ -606,6 +599,13 @@ class IntegrationRuntimeEdition(str, Enum):
     enterprise = "Enterprise"


+class DataFlowComputeType(str, Enum):
+
+    general = "General"
+    memory_optimized = "MemoryOptimized"
+    compute_optimized = "ComputeOptimized"
+
+
 class SsisObjectMetadataType(str, Enum):

     folder = "Folder"
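The two hunks above only move DataFlowComputeType later in the enums module (after IntegrationRuntimeEdition), matching the reordered import and __all__ lists in the first file; the enum itself is unchanged. A minimal sketch of why the relocation is behavior-neutral, assuming the package still re-exports the enum from azure.mgmt.datafactory.models as the __init__ diff shows: the enum derives from str, so its members remain interchangeable with the raw strings the service expects.

from azure.mgmt.datafactory.models import DataFlowComputeType

# str-derived enum members compare equal to plain strings, so callers may
# pass either DataFlowComputeType.memory_optimized or "MemoryOptimized".
assert DataFlowComputeType.memory_optimized == "MemoryOptimized"
assert isinstance(DataFlowComputeType.general, str)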
[File 3 of 4: models module (kwargs-based __init__)]
@@ -13425,6 +13425,18 @@ class ExecuteDataFlowActivity(ExecutionActivity):
     :param compute: Compute properties for data flow activity.
     :type compute:
      ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute
+    :param trace_level: Trace level setting used for data flow monitoring
+     output. Supported values are: 'coarse', 'fine', and 'none'. Type: string
+     (or Expression with resultType string)
+    :type trace_level: object
+    :param continue_on_error: Continue on error setting used for data flow
+     execution. Enables processing to continue if a sink fails. Type: boolean
+     (or Expression with resultType boolean)
+    :type continue_on_error: object
+    :param run_concurrently: Concurrent run setting used for data flow
+     execution. Allows sinks with the same save order to be processed
+     concurrently. Type: boolean (or Expression with resultType boolean)
+    :type run_concurrently: object
     """

     _validation = {
@@ -13446,6 +13458,9 @@ class ExecuteDataFlowActivity(ExecutionActivity):
         'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'},
         'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'},
         'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'},
+        'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'},
+        'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'},
+        'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'},
     }

     def __init__(self, **kwargs):
@@ -13454,6 +13469,9 @@ def __init__(self, **kwargs):
         self.staging = kwargs.get('staging', None)
         self.integration_runtime = kwargs.get('integration_runtime', None)
         self.compute = kwargs.get('compute', None)
+        self.trace_level = kwargs.get('trace_level', None)
+        self.continue_on_error = kwargs.get('continue_on_error', None)
+        self.run_concurrently = kwargs.get('run_concurrently', None)
         self.type = 'ExecuteDataFlow'


@@ -13462,17 +13480,17 @@ class ExecuteDataFlowActivityTypePropertiesCompute(Model):

     :param compute_type: Compute type of the cluster which will execute data
      flow job. Possible values include: 'General', 'MemoryOptimized',
-     'ComputeOptimized'
-    :type compute_type: str or
-     ~azure.mgmt.datafactory.models.DataFlowComputeType
+     'ComputeOptimized'. Type: string (or Expression with resultType string)
+    :type compute_type: object
     :param core_count: Core count of the cluster which will execute data flow
-     job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
-    :type core_count: int
+     job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer
+     (or Expression with resultType integer)
+    :type core_count: object
     """

     _attribute_map = {
-        'compute_type': {'key': 'computeType', 'type': 'str'},
-        'core_count': {'key': 'coreCount', 'type': 'int'},
+        'compute_type': {'key': 'computeType', 'type': 'object'},
+        'core_count': {'key': 'coreCount', 'type': 'object'},
     }

     def __init__(self, **kwargs):
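Taken together, the hunks in this file add three optional type properties (traceLevel, continueOnError, runConcurrently) to the kwargs-based ExecuteDataFlowActivity model. A hedged usage sketch, not taken from this PR: it assumes the models package also exposes a DataFlowReference class for the required data_flow argument, which this diff does not show.

from azure.mgmt.datafactory.models import (
    DataFlowReference,  # assumed helper model, not part of this diff
    ExecuteDataFlowActivity,
    ExecuteDataFlowActivityTypePropertiesCompute,
)

activity = ExecuteDataFlowActivity(
    name='RunMyDataFlow',
    data_flow=DataFlowReference(reference_name='MyDataFlow'),
    compute=ExecuteDataFlowActivityTypePropertiesCompute(
        compute_type='MemoryOptimized',
        core_count=16,
    ),
    trace_level='fine',      # 'coarse', 'fine', or 'none'
    continue_on_error=True,  # keep processing if a sink fails
    run_concurrently=True,   # run sinks with the same save order in parallel
)

Since this variant of the model accepts **kwargs, the new names are picked up by the kwargs.get calls added in the __init__ hunk above.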
[File 4 of 4: models module (Python 3, keyword-only __init__)]
@@ -13425,6 +13425,18 @@ class ExecuteDataFlowActivity(ExecutionActivity):
     :param compute: Compute properties for data flow activity.
     :type compute:
      ~azure.mgmt.datafactory.models.ExecuteDataFlowActivityTypePropertiesCompute
+    :param trace_level: Trace level setting used for data flow monitoring
+     output. Supported values are: 'coarse', 'fine', and 'none'. Type: string
+     (or Expression with resultType string)
+    :type trace_level: object
+    :param continue_on_error: Continue on error setting used for data flow
+     execution. Enables processing to continue if a sink fails. Type: boolean
+     (or Expression with resultType boolean)
+    :type continue_on_error: object
+    :param run_concurrently: Concurrent run setting used for data flow
+     execution. Allows sinks with the same save order to be processed
+     concurrently. Type: boolean (or Expression with resultType boolean)
+    :type run_concurrently: object
     """

     _validation = {
@@ -13446,14 +13458,20 @@ class ExecuteDataFlowActivity(ExecutionActivity):
         'staging': {'key': 'typeProperties.staging', 'type': 'DataFlowStagingInfo'},
         'integration_runtime': {'key': 'typeProperties.integrationRuntime', 'type': 'IntegrationRuntimeReference'},
         'compute': {'key': 'typeProperties.compute', 'type': 'ExecuteDataFlowActivityTypePropertiesCompute'},
+        'trace_level': {'key': 'typeProperties.traceLevel', 'type': 'object'},
+        'continue_on_error': {'key': 'typeProperties.continueOnError', 'type': 'object'},
+        'run_concurrently': {'key': 'typeProperties.runConcurrently', 'type': 'object'},
     }

-    def __init__(self, *, name: str, data_flow, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, staging=None, integration_runtime=None, compute=None, **kwargs) -> None:
+    def __init__(self, *, name: str, data_flow, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, staging=None, integration_runtime=None, compute=None, trace_level=None, continue_on_error=None, run_concurrently=None, **kwargs) -> None:
         super(ExecuteDataFlowActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs)
         self.data_flow = data_flow
         self.staging = staging
         self.integration_runtime = integration_runtime
         self.compute = compute
+        self.trace_level = trace_level
+        self.continue_on_error = continue_on_error
+        self.run_concurrently = run_concurrently
         self.type = 'ExecuteDataFlow'


@@ -13462,20 +13480,20 @@ class ExecuteDataFlowActivityTypePropertiesCompute(Model):

     :param compute_type: Compute type of the cluster which will execute data
      flow job. Possible values include: 'General', 'MemoryOptimized',
-     'ComputeOptimized'
-    :type compute_type: str or
-     ~azure.mgmt.datafactory.models.DataFlowComputeType
+     'ComputeOptimized'. Type: string (or Expression with resultType string)
+    :type compute_type: object
     :param core_count: Core count of the cluster which will execute data flow
-     job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
-    :type core_count: int
+     job. Supported values are: 8, 16, 32, 48, 80, 144 and 272. Type: integer
+     (or Expression with resultType integer)
+    :type core_count: object
     """

     _attribute_map = {
-        'compute_type': {'key': 'computeType', 'type': 'str'},
-        'core_count': {'key': 'coreCount', 'type': 'int'},
+        'compute_type': {'key': 'computeType', 'type': 'object'},
+        'core_count': {'key': 'coreCount', 'type': 'object'},
     }

-    def __init__(self, *, compute_type=None, core_count: int=None, **kwargs) -> None:
+    def __init__(self, *, compute_type=None, core_count=None, **kwargs) -> None:
         super(ExecuteDataFlowActivityTypePropertiesCompute, self).__init__(**kwargs)
         self.compute_type = compute_type
         self.core_count = core_count
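The str/int-to-object loosening on ExecuteDataFlowActivityTypePropertiesCompute (and the dropped core_count: int annotation in the keyword-only __init__ above) is what makes the compute settings parameterizable: each field can now carry either a literal or a Data Factory expression. A hedged sketch using the service's documented {'value': ..., 'type': 'Expression'} expression shape:

from azure.mgmt.datafactory.models import ExecuteDataFlowActivityTypePropertiesCompute

compute = ExecuteDataFlowActivityTypePropertiesCompute(
    compute_type='General',  # a literal string still works
    core_count={
        # an ADF expression, valid now that the field serializes as
        # 'object' rather than 'int'
        'value': '@pipeline().parameters.coreCount',
        'type': 'Expression',
    },
)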