Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion sdk/datafactory/azure-mgmt-datafactory/_meta.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
"@autorest/[email protected]",
"@autorest/[email protected]"
],
"commit": "2ce915398bfadd5333820487595a9623187dcb59",
"commit": "4de5239ea0deb949db68798673729bed7d013720",
"repository_url": "https://github.com/Azure/azure-rest-api-specs",
"autorest_command": "autorest specification/datafactory/resource-manager/readme.md --multiapi --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --track2 --use=@autorest/[email protected] --use=@autorest/[email protected] --version=3.4.5",
"readme": "specification/datafactory/resource-manager/readme.md"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

VERSION = "2.0.0"
VERSION = "1.0.0"
Original file line number Diff line number Diff line change
Expand Up @@ -279,6 +279,7 @@
from ._models_py3 import FileSystemSink
from ._models_py3 import FileSystemSource
from ._models_py3 import FilterActivity
from ._models_py3 import Flowlet
from ._models_py3 import ForEachActivity
from ._models_py3 import FormatReadSettings
from ._models_py3 import FormatWriteSettings
Expand Down Expand Up @@ -974,6 +975,7 @@
from ._models import FileSystemSink # type: ignore
from ._models import FileSystemSource # type: ignore
from ._models import FilterActivity # type: ignore
from ._models import Flowlet # type: ignore
from ._models import ForEachActivity # type: ignore
from ._models import FormatReadSettings # type: ignore
from ._models import FormatWriteSettings # type: ignore
Expand Down Expand Up @@ -1768,6 +1770,7 @@
'FileSystemSink',
'FileSystemSource',
'FilterActivity',
'Flowlet',
'ForEachActivity',
'FormatReadSettings',
'FormatWriteSettings',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11051,7 +11051,7 @@ class DataFlow(msrest.serialization.Model):
"""Azure Data Factory nested object which contains a flow with data movements and transformations.

You probably want to use the sub-classes and not this class directly. Known
sub-classes are: MappingDataFlow, WranglingDataFlow.
sub-classes are: Flowlet, MappingDataFlow, WranglingDataFlow.

All required parameters must be populated in order to send to Azure.

Expand All @@ -11078,7 +11078,7 @@ class DataFlow(msrest.serialization.Model):
}

_subtype_map = {
'type': {'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'}
'type': {'Flowlet': 'Flowlet', 'MappingDataFlow': 'MappingDataFlow', 'WranglingDataFlow': 'WranglingDataFlow'}
}

def __init__(
Expand Down Expand Up @@ -11190,6 +11190,8 @@ class DataFlowDebugPackage(msrest.serialization.Model):
:type session_id: str
:param data_flow: Data flow instance.
:type data_flow: ~azure.mgmt.datafactory.models.DataFlowDebugResource
:param data_flows: List of data flows.
:type data_flows: list[~azure.mgmt.datafactory.models.DataFlowDebugResource]
:param datasets: List of datasets.
:type datasets: list[~azure.mgmt.datafactory.models.DatasetDebugResource]
:param linked_services: List of linked services.
Expand All @@ -11204,6 +11206,7 @@ class DataFlowDebugPackage(msrest.serialization.Model):
'additional_properties': {'key': '', 'type': '{object}'},
'session_id': {'key': 'sessionId', 'type': 'str'},
'data_flow': {'key': 'dataFlow', 'type': 'DataFlowDebugResource'},
'data_flows': {'key': 'dataFlows', 'type': '[DataFlowDebugResource]'},
'datasets': {'key': 'datasets', 'type': '[DatasetDebugResource]'},
'linked_services': {'key': 'linkedServices', 'type': '[LinkedServiceDebugResource]'},
'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'},
Expand All @@ -11218,6 +11221,7 @@ def __init__(
self.additional_properties = kwargs.get('additional_properties', None)
self.session_id = kwargs.get('session_id', None)
self.data_flow = kwargs.get('data_flow', None)
self.data_flows = kwargs.get('data_flows', None)
self.datasets = kwargs.get('datasets', None)
self.linked_services = kwargs.get('linked_services', None)
self.staging = kwargs.get('staging', None)
Expand Down Expand Up @@ -11496,6 +11500,8 @@ class Transformation(msrest.serialization.Model):
:type name: str
:param description: Transformation description.
:type description: str
:param flowlet: Flowlet Reference.
:type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
"""

_validation = {
Expand All @@ -11505,6 +11511,7 @@ class Transformation(msrest.serialization.Model):
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
}

def __init__(
Expand All @@ -11514,6 +11521,7 @@ def __init__(
super(Transformation, self).__init__(**kwargs)
self.name = kwargs['name']
self.description = kwargs.get('description', None)
self.flowlet = kwargs.get('flowlet', None)


class DataFlowSink(Transformation):
Expand All @@ -11525,6 +11533,8 @@ class DataFlowSink(Transformation):
:type name: str
:param description: Transformation description.
:type description: str
:param flowlet: Flowlet Reference.
:type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
:param dataset: Dataset reference.
:type dataset: ~azure.mgmt.datafactory.models.DatasetReference
:param linked_service: Linked service reference.
Expand All @@ -11540,6 +11550,7 @@ class DataFlowSink(Transformation):
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
Expand All @@ -11564,6 +11575,8 @@ class DataFlowSource(Transformation):
:type name: str
:param description: Transformation description.
:type description: str
:param flowlet: Flowlet Reference.
:type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
:param dataset: Dataset reference.
:type dataset: ~azure.mgmt.datafactory.models.DatasetReference
:param linked_service: Linked service reference.
Expand All @@ -11579,6 +11592,7 @@ class DataFlowSource(Transformation):
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
Expand Down Expand Up @@ -16009,6 +16023,70 @@ def __init__(
self.condition = kwargs['condition']


class Flowlet(DataFlow):
    """Data flow flowlet: a reusable data-flow fragment.

    All required parameters must be populated in order to send to Azure.

    :param type: Required. Type of data flow. Constant filled by server.
    :type type: str
    :param description: The description of the data flow.
    :type description: str
    :param annotations: List of tags that can be used for describing the data flow.
    :type annotations: list[any]
    :param folder: The folder that this data flow is in. If not specified, Data flow will appear at
     the root level.
    :type folder: ~azure.mgmt.datafactory.models.DataFlowFolder
    :param additional_properties: Unmatched properties from the message are deserialized to this
     collection.
    :type additional_properties: dict[str, any]
    :param sources: List of sources in Flowlet.
    :type sources: list[~azure.mgmt.datafactory.models.DataFlowSource]
    :param sinks: List of sinks in Flowlet.
    :type sinks: list[~azure.mgmt.datafactory.models.DataFlowSink]
    :param transformations: List of transformations in Flowlet.
    :type transformations: list[~azure.mgmt.datafactory.models.Transformation]
    :param script: Flowlet script.
    :type script: str
    :param script_lines: Flowlet script lines.
    :type script_lines: list[str]
    :param additional_properties1: Any object.
    :type additional_properties1: any
    """

    _validation = {
        'type': {'required': True},
    }

    # Maps each Python attribute to its wire-format key and msrest type tag;
    # dotted keys are nested under the "typeProperties" JSON object.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'annotations': {'key': 'annotations', 'type': '[object]'},
        'folder': {'key': 'folder', 'type': 'DataFlowFolder'},
        'additional_properties': {'key': '', 'type': '{object}'},
        'sources': {'key': 'typeProperties.sources', 'type': '[DataFlowSource]'},
        'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'},
        'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'},
        'script': {'key': 'typeProperties.script', 'type': 'str'},
        'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'},
        'additional_properties1': {'key': 'typeProperties.additionalProperties', 'type': 'object'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(Flowlet, self).__init__(**kwargs)
        # Discriminator value identifying this DataFlow polymorphic subtype.
        self.type = 'Flowlet'  # type: str
        # Every flowlet-specific field is optional; default each one to None.
        for attr in ('additional_properties', 'sources', 'sinks',
                     'transformations', 'script', 'script_lines',
                     'additional_properties1'):
            setattr(self, attr, kwargs.get(attr, None))


class ForEachActivity(ControlActivity):
"""This activity is used for iterating over a collection and execute given activities.

Expand Down Expand Up @@ -22502,6 +22580,8 @@ class MappingDataFlow(DataFlow):
:type transformations: list[~azure.mgmt.datafactory.models.Transformation]
:param script: DataFlow script.
:type script: str
:param script_lines: Data flow script lines.
:type script_lines: list[str]
"""

_validation = {
Expand All @@ -22517,6 +22597,7 @@ class MappingDataFlow(DataFlow):
'sinks': {'key': 'typeProperties.sinks', 'type': '[DataFlowSink]'},
'transformations': {'key': 'typeProperties.transformations', 'type': '[Transformation]'},
'script': {'key': 'typeProperties.script', 'type': 'str'},
'script_lines': {'key': 'typeProperties.scriptLines', 'type': '[str]'},
}

def __init__(
Expand All @@ -22529,6 +22610,7 @@ def __init__(
self.sinks = kwargs.get('sinks', None)
self.transformations = kwargs.get('transformations', None)
self.script = kwargs.get('script', None)
self.script_lines = kwargs.get('script_lines', None)


class MariaDBLinkedService(LinkedService):
Expand Down Expand Up @@ -27506,6 +27588,8 @@ class PowerQuerySink(DataFlowSink):
:type name: str
:param description: Transformation description.
:type description: str
:param flowlet: Flowlet Reference.
:type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
:param dataset: Dataset reference.
:type dataset: ~azure.mgmt.datafactory.models.DatasetReference
:param linked_service: Linked service reference.
Expand All @@ -27523,6 +27607,7 @@ class PowerQuerySink(DataFlowSink):
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
Expand Down Expand Up @@ -27569,6 +27654,8 @@ class PowerQuerySource(DataFlowSource):
:type name: str
:param description: Transformation description.
:type description: str
:param flowlet: Flowlet Reference.
:type flowlet: ~azure.mgmt.datafactory.models.DataFlowReference
:param dataset: Dataset reference.
:type dataset: ~azure.mgmt.datafactory.models.DatasetReference
:param linked_service: Linked service reference.
Expand All @@ -27586,6 +27673,7 @@ class PowerQuerySource(DataFlowSource):
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
'flowlet': {'key': 'flowlet', 'type': 'DataFlowReference'},
'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
Expand Down
Loading