diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json
index 9d25a2b50221..f82806df0877 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/_meta.json
+++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json
@@ -4,7 +4,7 @@
     "@autorest/python@5.8.4",
     "@autorest/modelerfour@4.19.2"
   ],
-  "commit": "2ce915398bfadd5333820487595a9623187dcb59",
+  "commit": "4de5239ea0deb949db68798673729bed7d013720",
   "repository_url": "https://github.com/Azure/azure-rest-api-specs",
   "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/python@5.8.4 --use=@autorest/modelerfour@4.19.2 --version=3.4.5",
   "readme": "specification/datafactory/resource-manager/readme.md"
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
index 48944bf3938a..c47f66669f1b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_version.py
@@ -6,4 +6,4 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------

-VERSION = "2.0.0"
+VERSION = "1.0.0"
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index d05e600bf0b6..9a93a161658f 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -279,6 +279,7 @@
     from ._models_py3 import FileSystemSink
     from ._models_py3 import FileSystemSource
     from ._models_py3 import FilterActivity
+    from ._models_py3 import Flowlet
     from ._models_py3 import ForEachActivity
     from ._models_py3 import FormatReadSettings
     from ._models_py3 import FormatWriteSettings
@@ -974,6 +975,7 @@
     from ._models import FileSystemSink  # type: ignore
     from ._models import FileSystemSource  # type: ignore
     from ._models import FilterActivity  # type: ignore
+    from ._models import Flowlet  # type: ignore
     from ._models import ForEachActivity  # type: ignore
     from ._models import FormatReadSettings  # type: ignore
     from ._models import FormatWriteSettings  # type: ignore
@@ -1768,6 +1770,7 @@
     'FileSystemSink',
     'FileSystemSource',
     'FilterActivity',
+    'Flowlet',
     'ForEachActivity',
     'FormatReadSettings',
     'FormatWriteSettings',
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
index 5f4f0c460837..2e5839b4c5cc 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py
@@ -11051,7 +11051,7 @@ class DataFlow(msrest.serialization.Model):
     """Azure Data Factory nested object which contains a flow with data movements and
     transformations.

     You probably want to use the sub-classes and not this class directly. Known
-    sub-classes are: MappingDataFlow, WranglingDataFlow.
+    sub-classes are: Flowlet, MappingDataFlow, WranglingDataFlow.

     All required parameters must be populated in order to send to Azure.
@@ -11078,7 +11078,7 @@ class DataFlow(msrest.serialization.Model):
     }

     _subtype_map = {
-        'type': {'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'}
+        'type': {'Flowlet': 'Flowlet', 'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'}
     }

     def __init__(
@@ -11190,6 +11190,8 @@ class DataFlowDebugPackage(msrest.serialization.Model):
     :type session_id: str
     :param data_flow: Data flow instance.
     :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource
+    :param data_flows: List of Data flows.
+    :type data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource]
     :param datasets: List of datasets.
     :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource]
     :param linked_services: List of linked services.
@@ -11204,6 +11206,7 @@ class DataFlowDebugPackage(msrest.serialization.Model):
         'additional_properties': {'key': '', 'type': '{object}'},
         'session_id': {'key': 'sessionId', 'type': 'str'},
         'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'},
+        'data_flows': {'key': 'dataFlows', 'type': '[DataFlowDebugResource]'},
         'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'},
         'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'},
         'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'},
@@ -11218,6 +11221,7 @@ def __init__(
         self.additional_properties = kwargs.get('additional_properties', None)
         self.session_id = kwargs.get('session_id', None)
         self.data_flow = kwargs.get('data_flow', None)
+        self.data_flows = kwargs.get('data_flows', None)
         self.datasets = kwargs.get('datasets', None)
         self.linked_services = kwargs.get('linked_services', None)
         self.staging = kwargs.get('staging', None)
@@ -11496,6 +11500,8 @@ class Transformation(msrest.serialization.Model):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     """

     _validation = {
@@ -11505,6 +11511,7 @@ class Transformation(msrest.serialization.Model):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
     }

     def __init__(
@@ -11514,6 +11521,7 @@ def __init__(
         super(Transformation, self).__init__(**kwargs)
         self.name = kwargs['name']
         self.description = kwargs.get('description', None)
+        self.flowlet = kwargs.get('flowlet', None)


 class DataFlowSink(Transformation):
@@ -11525,6 +11533,8 @@ class DataFlowSink(Transformation):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     :param dataset: Dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     :param linked_service: Linked service reference.
@@ -11540,6 +11550,7 @@ class DataFlowSink(Transformation):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
         'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
         'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
         'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
@@ -11564,6 +11575,8 @@ class DataFlowSource(Transformation):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     :param dataset: Dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     :param linked_service: Linked service reference.
@@ -11579,6 +11592,7 @@ class DataFlowSource(Transformation):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
         'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
         'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
         'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
@@ -16009,6 +16023,70 @@ def __init__(
         self.condition = kwargs['condition']


+class Flowlet(DataFlow):
+    """Data flow flowlet.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Type of data flow.Constant filled by server.
+    :type type: str
+    :param description: The description of the data flow.
+    :type description: str
+    :param annotations: List of tags that can be used for describing the data flow.
+    :type annotations: list[any]
+    :param folder: The folder that this data flow is in. If not specified, Data flow will appear at
+     the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, any]
+    :param sources: List of sources in Flowlet.
+    :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource]
+    :param sinks: List of sinks in Flowlet.
+    :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink]
+    :param transformations: List of transformations in Flowlet.
+    :type transformations: list[~azure.mgmt.datafactory.models.Transformation]
+    :param script: Flowlet script.
+    :type script: str
+    :param script_lines: Flowlet script lines.
+    :type script_lines: list[str]
+    :param additional_properties1: Any object.
+    :type additional_properties1: any
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DataFlowFolder'},
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'},
+        'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'},
+        'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'},
+        'script': {'key': 'typeProperties.script', 'type': 'str'},
+        'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'},
+        'additional_properties1': {'key': 'typeProperties.additionalProperties', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(Flowlet, self).__init__(**kwargs)
+        self.type = 'Flowlet'  # type: str
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.sources = kwargs.get('sources', None)
+        self.sinks = kwargs.get('sinks', None)
+        self.transformations = kwargs.get('transformations', None)
+        self.script = kwargs.get('script', None)
+        self.script_lines = kwargs.get('script_lines', None)
+        self.additional_properties1 = kwargs.get('additional_properties1', None)
+
+
 class ForEachActivity(ControlActivity):
     """This activity is used for iterating over a collection and execute given activities.

@@ -22502,6 +22580,8 @@ class MappingDataFlow(DataFlow):
     :type transformations: list[~azure.mgmt.datafactory.models.Transformation]
     :param script: DataFlow script.
     :type script: str
+    :param script_lines: Data flow script lines.
+    :type script_lines: list[str]
     """

     _validation = {
@@ -22517,6 +22597,7 @@ class MappingDataFlow(DataFlow):
         'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'},
         'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'},
         'script': {'key': 'typeProperties.script', 'type': 'str'},
+        'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'},
     }

     def __init__(
@@ -22529,6 +22610,7 @@ def __init__(
         self.sinks = kwargs.get('sinks', None)
         self.transformations = kwargs.get('transformations', None)
         self.script = kwargs.get('script', None)
+        self.script_lines = kwargs.get('script_lines', None)


 class MariaDBLinkedService(LinkedService):
@@ -27506,6 +27588,8 @@ class PowerQuerySink(DataFlowSink):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     :param dataset: Dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     :param linked_service: Linked service reference.
@@ -27523,6 +27607,7 @@ class PowerQuerySink(DataFlowSink):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
         'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
         'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
         'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
@@ -27569,6 +27654,8 @@ class PowerQuerySource(DataFlowSource):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     :param dataset: Dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     :param linked_service: Linked service reference.
@@ -27586,6 +27673,7 @@ class PowerQuerySource(DataFlowSource):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
         'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
         'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
         'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
index 963145597ea9..62ae06b6f9a6 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
@@ -12696,7 +12696,7 @@ class DataFlow(msrest.serialization.Model):
     """Azure Data Factory nested object which contains a flow with data movements and
     transformations.

     You probably want to use the sub-classes and not this class directly. Known
-    sub-classes are: MappingDataFlow, WranglingDataFlow.
+    sub-classes are: Flowlet, MappingDataFlow, WranglingDataFlow.

     All required parameters must be populated in order to send to Azure.
@@ -12723,7 +12723,7 @@ class DataFlow(msrest.serialization.Model):
     }

     _subtype_map = {
-        'type': {'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'}
+        'type': {'Flowlet': 'Flowlet', 'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'}
     }

     def __init__(
@@ -12851,6 +12851,8 @@ class DataFlowDebugPackage(msrest.serialization.Model):
     :type session_id: str
     :param data_flow: Data flow instance.
     :type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource
+    :param data_flows: List of Data flows.
+    :type data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource]
     :param datasets: List of datasets.
     :type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource]
     :param linked_services: List of linked services.
@@ -12865,6 +12867,7 @@ class DataFlowDebugPackage(msrest.serialization.Model):
         'additional_properties': {'key': '', 'type': '{object}'},
         'session_id': {'key': 'sessionId', 'type': 'str'},
         'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'},
+        'data_flows': {'key': 'dataFlows', 'type': '[DataFlowDebugResource]'},
         'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'},
         'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'},
         'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'},
@@ -12877,6 +12880,7 @@ def __init__(
         additional_properties: Optional[Dict[str, Any]] = None,
         session_id: Optional[str] = None,
         data_flow: Optional["DataFlowDebugResource"] = None,
+        data_flows: Optional[List["DataFlowDebugResource"]] = None,
         datasets: Optional[List["DatasetDebugResource"]] = None,
         linked_services: Optional[List["LinkedServiceDebugResource"]] = None,
         staging: Optional["DataFlowStagingInfo"] = None,
@@ -12887,6 +12891,7 @@ def __init__(
         self.additional_properties = additional_properties
         self.session_id = session_id
         self.data_flow = data_flow
+        self.data_flows = data_flows
         self.datasets = datasets
         self.linked_services = linked_services
         self.staging = staging
@@ -13196,6 +13201,8 @@ class Transformation(msrest.serialization.Model):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     """

     _validation = {
@@ -13205,6 +13212,7 @@ class Transformation(msrest.serialization.Model):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
     }

     def __init__(
@@ -13212,11 +13220,13 @@ def __init__(
         *,
         name: str,
         description: Optional[str] = None,
+        flowlet: Optional["DataFlowReference"] = None,
         **kwargs
     ):
         super(Transformation, self).__init__(**kwargs)
         self.name = name
         self.description = description
+        self.flowlet = flowlet


 class DataFlowSink(Transformation):
@@ -13228,6 +13238,8 @@ class DataFlowSink(Transformation):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     :param dataset: Dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     :param linked_service: Linked service reference.
@@ -13243,6 +13255,7 @@ class DataFlowSink(Transformation):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
         'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
         'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
         'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
@@ -13253,12 +13266,13 @@ def __init__(
         *,
         name: str,
         description: Optional[str] = None,
+        flowlet: Optional["DataFlowReference"] = None,
         dataset: Optional["DatasetReference"] = None,
         linked_service: Optional["LinkedServiceReference"] = None,
         schema_linked_service: Optional["LinkedServiceReference"] = None,
         **kwargs
     ):
-        super(DataFlowSink, self).__init__(name=name, description=description, **kwargs)
+        super(DataFlowSink, self).__init__(name=name, description=description, flowlet=flowlet, **kwargs)
         self.dataset = dataset
         self.linked_service = linked_service
         self.schema_linked_service = schema_linked_service
@@ -13273,6 +13287,8 @@ class DataFlowSource(Transformation):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     :param dataset: Dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     :param linked_service: Linked service reference.
@@ -13288,6 +13304,7 @@ class DataFlowSource(Transformation):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
         'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
         'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
         'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
@@ -13298,12 +13315,13 @@ def __init__(
         *,
         name: str,
         description: Optional[str] = None,
+        flowlet: Optional["DataFlowReference"] = None,
         dataset: Optional["DatasetReference"] = None,
         linked_service: Optional["LinkedServiceReference"] = None,
         schema_linked_service: Optional["LinkedServiceReference"] = None,
         **kwargs
     ):
-        super(DataFlowSource, self).__init__(name=name, description=description, **kwargs)
+        super(DataFlowSource, self).__init__(name=name, description=description, flowlet=flowlet, **kwargs)
         self.dataset = dataset
         self.linked_service = linked_service
         self.schema_linked_service = schema_linked_service
@@ -18359,6 +18377,81 @@ def __init__(
         self.condition = condition


+class Flowlet(DataFlow):
+    """Data flow flowlet.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param type: Required. Type of data flow.Constant filled by server.
+    :type type: str
+    :param description: The description of the data flow.
+    :type description: str
+    :param annotations: List of tags that can be used for describing the data flow.
+    :type annotations: list[any]
+    :param folder: The folder that this data flow is in. If not specified, Data flow will appear at
+     the root level.
+    :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, any]
+    :param sources: List of sources in Flowlet.
+    :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource]
+    :param sinks: List of sinks in Flowlet.
+    :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink]
+    :param transformations: List of transformations in Flowlet.
+    :type transformations: list[~azure.mgmt.datafactory.models.Transformation]
+    :param script: Flowlet script.
+    :type script: str
+    :param script_lines: Flowlet script lines.
+    :type script_lines: list[str]
+    :param additional_properties1: Any object.
+    :type additional_properties1: any
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+        'annotations': {'key': 'annotations', 'type': '[object]'},
+        'folder': {'key': 'folder', 'type': 'DataFlowFolder'},
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'},
+        'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'},
+        'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'},
+        'script': {'key': 'typeProperties.script', 'type': 'str'},
+        'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'},
+        'additional_properties1': {'key': 'typeProperties.additionalProperties', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        description: Optional[str] = None,
+        annotations: Optional[List[Any]] = None,
+        folder: Optional["DataFlowFolder"] = None,
+        additional_properties: Optional[Dict[str, Any]] = None,
+        sources: Optional[List["DataFlowSource"]] = None,
+        sinks: Optional[List["DataFlowSink"]] = None,
+        transformations: Optional[List["Transformation"]] = None,
+        script: Optional[str] = None,
+        script_lines: Optional[List[str]] = None,
+        additional_properties1: Optional[Any] = None,
+        **kwargs
+    ):
+        super(Flowlet, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs)
+        self.type = 'Flowlet'  # type: str
+        self.additional_properties = additional_properties
+        self.sources = sources
+        self.sinks = sinks
+        self.transformations = transformations
+        self.script = script
+        self.script_lines = script_lines
+        self.additional_properties1 = additional_properties1
+
+
 class ForEachActivity(ControlActivity):
     """This activity is used for iterating over a collection and execute given activities.

@@ -25763,6 +25856,8 @@ class MappingDataFlow(DataFlow):
     :type transformations: list[~azure.mgmt.datafactory.models.Transformation]
     :param script: DataFlow script.
     :type script: str
+    :param script_lines: Data flow script lines.
+    :type script_lines: list[str]
     """

     _validation = {
@@ -25778,6 +25873,7 @@ class MappingDataFlow(DataFlow):
         'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'},
         'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'},
         'script': {'key': 'typeProperties.script', 'type': 'str'},
+        'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'},
     }

     def __init__(
@@ -25790,6 +25886,7 @@ def __init__(
         sinks: Optional[List["DataFlowSink"]] = None,
         transformations: Optional[List["Transformation"]] = None,
         script: Optional[str] = None,
+        script_lines: Optional[List[str]] = None,
         **kwargs
     ):
         super(MappingDataFlow, self).__init__(description=description, annotations=annotations, folder=folder, **kwargs)
@@ -25798,6 +25895,7 @@ def __init__(
         self.sinks = sinks
         self.transformations = transformations
         self.script = script
+        self.script_lines = script_lines


 class MariaDBLinkedService(LinkedService):
@@ -31531,6 +31629,8 @@ class PowerQuerySink(DataFlowSink):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     :param dataset: Dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     :param linked_service: Linked service reference.
@@ -31548,6 +31648,7 @@ class PowerQuerySink(DataFlowSink):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
         'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
         'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
         'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
@@ -31559,13 +31660,14 @@ def __init__(
         *,
         name: str,
         description: Optional[str] = None,
+        flowlet: Optional["DataFlowReference"] = None,
         dataset: Optional["DatasetReference"] = None,
         linked_service: Optional["LinkedServiceReference"] = None,
         schema_linked_service: Optional["LinkedServiceReference"] = None,
         script: Optional[str] = None,
         **kwargs
     ):
-        super(PowerQuerySink, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, schema_linked_service=schema_linked_service, **kwargs)
+        super(PowerQuerySink, self).__init__(name=name, description=description, flowlet=flowlet, dataset=dataset, linked_service=linked_service, schema_linked_service=schema_linked_service, **kwargs)
         self.script = script
@@ -31604,6 +31706,8 @@ class PowerQuerySource(DataFlowSource):
     :type name: str
     :param description: Transformation description.
     :type description: str
+    :param flowlet: Flowlet Reference.
+    :type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
     :param dataset: Dataset reference.
     :type dataset: ~azure.mgmt.datafactory.models.DatasetReference
     :param linked_service: Linked service reference.
@@ -31621,6 +31725,7 @@ class PowerQuerySource(DataFlowSource):
     _attribute_map = {
         'name': {'key': 'name', 'type': 'str'},
         'description': {'key': 'description', 'type': 'str'},
+        'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
         'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
         'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
         'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
@@ -31632,13 +31737,14 @@ def __init__(
         *,
         name: str,
         description: Optional[str] = None,
+        flowlet: Optional["DataFlowReference"] = None,
         dataset: Optional["DatasetReference"] = None,
         linked_service: Optional["LinkedServiceReference"] = None,
         schema_linked_service: Optional["LinkedServiceReference"] = None,
         script: Optional[str] = None,
         **kwargs
     ):
-        super(PowerQuerySource, self).__init__(name=name, description=description, dataset=dataset, linked_service=linked_service, schema_linked_service=schema_linked_service, **kwargs)
+        super(PowerQuerySource, self).__init__(name=name, description=description, flowlet=flowlet, dataset=dataset, linked_service=linked_service, schema_linked_service=schema_linked_service, **kwargs)
         self.script = script
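
For reviewers, a minimal usage sketch of the regenerated surface. This is hypothetical, not part of the generated code: the resource group, factory, subscription, and reference names are placeholders, and the script lines are illustrative stand-ins rather than valid data flow script. It registers a Flowlet through the existing data_flows.create_or_update operation, then points a MappingDataFlow source at it via the new flowlet parameter (a DataFlowReference), exercising the new script_lines field on the way.

from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    DataFlowReference,
    DataFlowResource,
    DataFlowSink,
    DataFlowSource,
    DatasetReference,
    Flowlet,
    MappingDataFlow,
)

# Placeholder identifiers -- substitute real values.
client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# A flowlet is a reusable sub-graph with its own sources, sinks and script.
flowlet = Flowlet(
    description="Reusable cleansing steps",
    sources=[DataFlowSource(name="FlowletInput")],
    sinks=[DataFlowSink(name="FlowletOutput")],
    script_lines=["<illustrative flowlet script line>"],  # new scriptLines field
)
client.data_flows.create_or_update(
    "<resource-group>", "<factory-name>", "CleanseFlowlet",
    DataFlowResource(properties=flowlet),
)

# A mapping data flow transformation can now reference the flowlet instead of
# (or alongside) a dataset, via the new `flowlet` parameter on Transformation.
data_flow = MappingDataFlow(
    sources=[
        DataFlowSource(
            name="RawEvents",
            flowlet=DataFlowReference(type="DataFlowReference", reference_name="CleanseFlowlet"),
        )
    ],
    sinks=[
        DataFlowSink(
            name="CuratedEvents",
            dataset=DatasetReference(type="DatasetReference", reference_name="CuratedDataset"),
        )
    ],
    script_lines=["<illustrative data flow script line>"],  # new alongside `script`
)
client.data_flows.create_or_update(
    "<resource-group>", "<factory-name>", "EventsDataFlow",
    DataFlowResource(properties=data_flow),
)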
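The new data_flows list on DataFlowDebugPackage serves the same scenario in debug sessions: a package can now stage the parent flow together with the flowlets it references in a single call. Continuing the sketch above (same placeholder names, and reusing the client, flowlet and data_flow objects), via the existing data_flow_debug_session.add_data_flow operation:

from azure.mgmt.datafactory.models import DataFlowDebugPackage, DataFlowDebugResource

debug_package = DataFlowDebugPackage(
    session_id="<debug-session-id>",  # placeholder; from create_data_flow_debug_session
    data_flow=DataFlowDebugResource(name="EventsDataFlow", properties=data_flow),
    # New in this version: additional flows staged alongside the main one.
    data_flows=[DataFlowDebugResource(name="CleanseFlowlet", properties=flowlet)],
)
client.data_flow_debug_session.add_data_flow("<resource-group>", "<factory-name>", debug_package)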