2 changes: 1 addition & 1 deletion sdk/datafactory/azure-mgmt-datafactory/README.md
@@ -2,7 +2,7 @@

This is the Microsoft Azure Data Factory Management Client Library.
This package has been tested with Python 2.7, 3.5, 3.6, 3.7 and 3.8.
-For a more complete view of Azure libraries, see the [Github repo](https://github.com/Azure/azure-sdk-for-python/)
+For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all).


# Usage
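For orientation, a minimal usage sketch of this package's management client, assuming the msrest-based generation this PR targets; the credential class and every `<...>` value below are placeholders, not part of this PR:

```python
# Minimal usage sketch (assumption: msrest-based generation of this package;
# the credential flow and all "<...>" values are placeholders).
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id="<client-id>",
    secret="<client-secret>",
    tenant="<tenant-id>",
)
client = DataFactoryManagementClient(credentials, "<subscription-id>")

# Factories in the subscription come back as a paged iterable.
for factory in client.factories.list():
    print(factory.name)
```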
@@ -10024,8 +10024,6 @@ class DataFlowSink(Transformation):
:param schema_linked_service: Schema linked service reference.
:type schema_linked_service:
~azure.mgmt.datafactory.models.LinkedServiceReference
-:param staging: Staging info for execute data flow activity.
-:type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo
"""

_validation = {
@@ -10038,15 +10036,13 @@ class DataFlowSink(Transformation):
'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
-'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'},
}

def __init__(self, **kwargs):
super(DataFlowSink, self).__init__(**kwargs)
self.dataset = kwargs.get('dataset', None)
self.linked_service = kwargs.get('linked_service', None)
self.schema_linked_service = kwargs.get('schema_linked_service', None)
-self.staging = kwargs.get('staging', None)


class DataFlowSource(Transformation):
@@ -10066,8 +10062,6 @@ class DataFlowSource(Transformation):
:param schema_linked_service: Schema linked service reference.
:type schema_linked_service:
~azure.mgmt.datafactory.models.LinkedServiceReference
-:param staging: Staging info for execute data flow activity.
-:type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo
"""

_validation = {
@@ -10080,15 +10074,13 @@ class DataFlowSource(Transformation):
'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
-'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'},
}

def __init__(self, **kwargs):
super(DataFlowSource, self).__init__(**kwargs)
self.dataset = kwargs.get('dataset', None)
self.linked_service = kwargs.get('linked_service', None)
self.schema_linked_service = kwargs.get('schema_linked_service', None)
-self.staging = kwargs.get('staging', None)


class DataFlowSourceSetting(Model):
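These hunks remove `staging` from `DataFlowSink` and `DataFlowSource` in lockstep across the docstring, the `_attribute_map`, and `__init__`. A hedged sketch of constructing a sink after the change; the names are placeholders:

```python
# Sketch: building a sink after this PR. 'staging' is gone from the model,
# so only the remaining attributes are accepted; names here are placeholders.
from azure.mgmt.datafactory.models import DataFlowSink, DatasetReference

sink = DataFlowSink(
    name="sink1",  # required by the Transformation base class
    dataset=DatasetReference(reference_name="OutputDataset"),
)
```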
@@ -10024,8 +10024,6 @@ class DataFlowSink(Transformation):
:param schema_linked_service: Schema linked service reference.
:type schema_linked_service:
~azure.mgmt.datafactory.models.LinkedServiceReference
-:param staging: Staging info for execute data flow activity.
-:type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo
"""

_validation = {
@@ -10038,15 +10036,13 @@ class DataFlowSink(Transformation):
'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
-'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'},
}

-def __init__(self, *, name: str, description: str=None, dataset=None, linked_service=None, schema_linked_service=None, staging=None, **kwargs) -> None:
+def __init__(self, *, name: str, description: str=None, dataset=None, linked_service=None, schema_linked_service=None, **kwargs) -> None:
super(DataFlowSink, self).__init__(name=name, description=description, **kwargs)
self.dataset = dataset
self.linked_service = linked_service
self.schema_linked_service = schema_linked_service
-self.staging = staging


class DataFlowSource(Transformation):
@@ -10066,8 +10062,6 @@ class DataFlowSource(Transformation):
:param schema_linked_service: Schema linked service reference.
:type schema_linked_service:
~azure.mgmt.datafactory.models.LinkedServiceReference
-:param staging: Staging info for execute data flow activity.
-:type staging: ~azure.mgmt.datafactory.models.DataFlowStagingInfo
"""

_validation = {
@@ -10080,15 +10074,13 @@ class DataFlowSource(Transformation):
'dataset': {'key': 'dataset', 'type': 'DatasetReference'},
'linked_service': {'key': 'linkedService', 'type': 'LinkedServiceReference'},
'schema_linked_service': {'key': 'schemaLinkedService', 'type': 'LinkedServiceReference'},
-'staging': {'key': 'staging', 'type': 'DataFlowStagingInfo'},
}

-def __init__(self, *, name: str, description: str=None, dataset=None, linked_service=None, schema_linked_service=None, staging=None, **kwargs) -> None:
+def __init__(self, *, name: str, description: str=None, dataset=None, linked_service=None, schema_linked_service=None, **kwargs) -> None:
super(DataFlowSource, self).__init__(name=name, description=description, **kwargs)
self.dataset = dataset
self.linked_service = linked_service
self.schema_linked_service = schema_linked_service
-self.staging = staging


class DataFlowSourceSetting(Model):
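The typed models mirror the kwargs-based file, so the same three removals appear here with keyword-only, annotated signatures. The deleted `_attribute_map` entry is what told the msrest serializer to emit a `staging` key; a hedged sketch of that mapping at work, assuming msrest's `Model.serialize()` helper and with an illustrative output shape:

```python
# Sketch: msrest serializes a model by walking _attribute_map, so dropping
# the 'staging' entry means serialized payloads no longer carry that key.
# Assumes msrest's Model.serialize(); the printed shape is illustrative.
from azure.mgmt.datafactory.models import DataFlowSink, DatasetReference

sink = DataFlowSink(
    name="sink1",
    dataset=DatasetReference(reference_name="OutputDataset"),
)
print(sink.serialize())
# e.g. {'name': 'sink1',
#       'dataset': {'referenceName': 'OutputDataset', 'type': 'DatasetReference'}}
```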