diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py index ec8185523fbd..78ff241e7898 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -26,7 +26,6 @@ from .operations import ActivityRunsOperations from .operations import TriggersOperations from .operations import TriggerRunsOperations -from .operations import RerunTriggersOperations from .operations import DataFlowsOperations from .operations import DataFlowDebugSessionOperations from . import models @@ -64,8 +63,6 @@ class DataFactoryManagementClient(SDKClient): :vartype triggers: azure.mgmt.datafactory.operations.TriggersOperations :ivar trigger_runs: TriggerRuns operations :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations - :ivar rerun_triggers: RerunTriggers operations - :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations :ivar data_flows: DataFlows operations :vartype data_flows: azure.mgmt.datafactory.operations.DataFlowsOperations :ivar data_flow_debug_session: DataFlowDebugSession operations @@ -116,8 +113,6 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.trigger_runs = TriggerRunsOperations( self._client, self.config, self._serialize, self._deserialize) - self.rerun_triggers = RerunTriggersOperations( - self._client, self.config, self._serialize, self._deserialize) self.data_flows = DataFlowsOperations( self._client, self.config, self._serialize, self._deserialize) self.data_flow_debug_session = DataFlowDebugSessionOperations( diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 4e9756b6aad1..3ca7ea25399a 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -17,6 +17,7 @@ from ._models_py3 import ActivityRun from ._models_py3 import ActivityRunsQueryResponse from ._models_py3 import AddDataFlowToDebugSessionResponse + from ._models_py3 import AdditionalColumns from ._models_py3 import AmazonMWSLinkedService from ._models_py3 import AmazonMWSObjectDataset from ._models_py3 import AmazonMWSSource @@ -426,9 +427,7 @@ from ._models_py3 import RedshiftUnloadSettings from ._models_py3 import RelationalSource from ._models_py3 import RelationalTableDataset - from ._models_py3 import RerunTriggerResource from ._models_py3 import RerunTumblingWindowTrigger - from ._models_py3 import RerunTumblingWindowTriggerActionParameters from ._models_py3 import Resource from ._models_py3 import ResponsysLinkedService from ._models_py3 import ResponsysObjectDataset @@ -492,6 +491,7 @@ from ._models_py3 import ShopifyLinkedService from ._models_py3 import ShopifyObjectDataset from ._models_py3 import ShopifySource + from ._models_py3 import SkipErrorFile from ._models_py3 import SparkLinkedService from ._models_py3 import SparkObjectDataset from ._models_py3 import SparkSource @@ -546,7 +546,9 @@ from ._models_py3 import Transformation from ._models_py3 import Trigger from ._models_py3 import TriggerDependencyReference + from ._models_py3 import TriggerFilterParameters from 
._models_py3 import TriggerPipelineReference + from ._models_py3 import TriggerQueryResponse from ._models_py3 import TriggerReference from ._models_py3 import TriggerResource from ._models_py3 import TriggerRun @@ -589,6 +591,7 @@ from ._models import ActivityRun from ._models import ActivityRunsQueryResponse from ._models import AddDataFlowToDebugSessionResponse + from ._models import AdditionalColumns from ._models import AmazonMWSLinkedService from ._models import AmazonMWSObjectDataset from ._models import AmazonMWSSource @@ -998,9 +1001,7 @@ from ._models import RedshiftUnloadSettings from ._models import RelationalSource from ._models import RelationalTableDataset - from ._models import RerunTriggerResource from ._models import RerunTumblingWindowTrigger - from ._models import RerunTumblingWindowTriggerActionParameters from ._models import Resource from ._models import ResponsysLinkedService from ._models import ResponsysObjectDataset @@ -1064,6 +1065,7 @@ from ._models import ShopifyLinkedService from ._models import ShopifyObjectDataset from ._models import ShopifySource + from ._models import SkipErrorFile from ._models import SparkLinkedService from ._models import SparkObjectDataset from ._models import SparkSource @@ -1118,7 +1120,9 @@ from ._models import Transformation from ._models import Trigger from ._models import TriggerDependencyReference + from ._models import TriggerFilterParameters from ._models import TriggerPipelineReference + from ._models import TriggerQueryResponse from ._models import TriggerReference from ._models import TriggerResource from ._models import TriggerRun @@ -1161,7 +1165,6 @@ from ._paged_models import LinkedServiceResourcePaged from ._paged_models import OperationPaged from ._paged_models import PipelineResourcePaged -from ._paged_models import RerunTriggerResourcePaged from ._paged_models import TriggerResourcePaged from ._data_factory_management_client_enums import ( IntegrationRuntimeState, @@ -1177,11 +1180,6 @@ RunQueryOrder, TriggerRunStatus, DataFlowDebugCommandType, - TumblingWindowFrequency, - BlobEventTypes, - DayOfWeek, - DaysOfWeek, - RecurrenceFrequency, GoogleAdWordsAuthenticationType, SparkServerType, SparkThriftTransportProtocol, @@ -1210,6 +1208,11 @@ DynamicsAuthenticationType, OrcCompressionCodec, AvroCompressionCodec, + TumblingWindowFrequency, + BlobEventTypes, + DayOfWeek, + DaysOfWeek, + RecurrenceFrequency, DataFlowComputeType, AzureFunctionActivityMethod, WebActivityMethod, @@ -1250,6 +1253,7 @@ 'ActivityRun', 'ActivityRunsQueryResponse', 'AddDataFlowToDebugSessionResponse', + 'AdditionalColumns', 'AmazonMWSLinkedService', 'AmazonMWSObjectDataset', 'AmazonMWSSource', @@ -1659,9 +1663,7 @@ 'RedshiftUnloadSettings', 'RelationalSource', 'RelationalTableDataset', - 'RerunTriggerResource', 'RerunTumblingWindowTrigger', - 'RerunTumblingWindowTriggerActionParameters', 'Resource', 'ResponsysLinkedService', 'ResponsysObjectDataset', @@ -1725,6 +1727,7 @@ 'ShopifyLinkedService', 'ShopifyObjectDataset', 'ShopifySource', + 'SkipErrorFile', 'SparkLinkedService', 'SparkObjectDataset', 'SparkSource', @@ -1779,7 +1782,9 @@ 'Transformation', 'Trigger', 'TriggerDependencyReference', + 'TriggerFilterParameters', 'TriggerPipelineReference', + 'TriggerQueryResponse', 'TriggerReference', 'TriggerResource', 'TriggerRun', @@ -1821,7 +1826,6 @@ 'DatasetResourcePaged', 'PipelineResourcePaged', 'TriggerResourcePaged', - 'RerunTriggerResourcePaged', 'DataFlowResourcePaged', 'DataFlowDebugSessionInfoPaged', 'IntegrationRuntimeState', @@ -1837,11 
+1841,6 @@ 'RunQueryOrder', 'TriggerRunStatus', 'DataFlowDebugCommandType', - 'TumblingWindowFrequency', - 'BlobEventTypes', - 'DayOfWeek', - 'DaysOfWeek', - 'RecurrenceFrequency', 'GoogleAdWordsAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', @@ -1870,6 +1869,11 @@ 'DynamicsAuthenticationType', 'OrcCompressionCodec', 'AvroCompressionCodec', + 'TumblingWindowFrequency', + 'BlobEventTypes', + 'DayOfWeek', + 'DaysOfWeek', + 'RecurrenceFrequency', 'DataFlowComputeType', 'AzureFunctionActivityMethod', 'WebActivityMethod', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 95741bbf0fa1..e30a4c509021 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -131,51 +131,6 @@ class DataFlowDebugCommandType(str, Enum): execute_expression_query = "executeExpressionQuery" -class TumblingWindowFrequency(str, Enum): - - minute = "Minute" - hour = "Hour" - - -class BlobEventTypes(str, Enum): - - microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" - microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" - - -class DayOfWeek(str, Enum): - - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" - - -class DaysOfWeek(str, Enum): - - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" - - -class RecurrenceFrequency(str, Enum): - - not_specified = "NotSpecified" - minute = "Minute" - hour = "Hour" - day = "Day" - week = "Week" - month = "Month" - year = "Year" - - class GoogleAdWordsAuthenticationType(str, Enum): service_authentication = "ServiceAuthentication" @@ -366,6 +321,51 @@ class AvroCompressionCodec(str, Enum): bzip2 = "bzip2" +class TumblingWindowFrequency(str, Enum): + + minute = "Minute" + hour = "Hour" + + +class BlobEventTypes(str, Enum): + + microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" + microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" + + +class DayOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + +class DaysOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + +class RecurrenceFrequency(str, Enum): + + not_specified = "NotSpecified" + minute = "Minute" + hour = "Hour" + day = "Day" + week = "Week" + month = "Month" + year = "Year" + + class DataFlowComputeType(str, Enum): general = "General" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index 853ec8158be8..d9d5d3ee8266 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -302,6 +302,28 @@ def __init__(self, **kwargs): self.job_version = kwargs.get('job_version', None) +class 
AdditionalColumns(Model): + """Specify the column name and value of additional columns. + + :param name: Additional column name. Type: string (or Expression with + resultType string). + :type name: object + :param value: Additional column value. Type: string (or Expression with + resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AdditionalColumns, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + class LinkedService(Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute @@ -748,6 +770,11 @@ class TabularSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -761,6 +788,7 @@ class TabularSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } _subtype_map = { @@ -770,6 +798,7 @@ class TabularSource(CopySource): def __init__(self, **kwargs): super(TabularSource, self).__init__(**kwargs) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'TabularSource' @@ -798,6 +827,11 @@ class AmazonMWSSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -814,6 +848,7 @@ class AmazonMWSSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -922,6 +957,11 @@ class AmazonRedshiftSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
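The hunks above introduce the new `AdditionalColumns` model and thread an `additional_columns` parameter through `TabularSource` and each of its subclasses. A minimal sketch of how a caller might populate it, with placeholder query and column names; static string values are shown, though per the docstrings `name` and `value` also accept Data Factory expression objects:

```python
from azure.mgmt.datafactory.models import AdditionalColumns, AzureSqlSource

# Append two fixed-value columns to every row read by the copy source.
source = AzureSqlSource(
    sql_reader_query="SELECT * FROM dbo.Orders",
    additional_columns=[
        AdditionalColumns(name="source_system", value="orders-db"),
        AdditionalColumns(name="load_batch", value="2020-02"),
    ],
)
```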
:type query: object @@ -944,6 +984,7 @@ class AmazonRedshiftSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -1347,6 +1388,10 @@ class AmazonS3ReadSettings(StoreReadSettings): :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -1370,6 +1415,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -1381,6 +1427,7 @@ def __init__(self, **kwargs): self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -1784,6 +1831,11 @@ class AvroSource(CopySource): :type type: str :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -1797,11 +1849,13 @@ class AvroSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(AvroSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'AvroSource' @@ -2249,6 +2303,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. 
Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -2271,6 +2329,7 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -2281,6 +2340,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -2638,6 +2698,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -2661,6 +2725,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -2672,6 +2737,7 @@ def __init__(self, **kwargs): self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -3142,6 +3208,11 @@ class AzureDataExplorerSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -3158,6 +3229,7 @@ class AzureDataExplorerSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -3165,6 +3237,7 @@ def __init__(self, **kwargs): self.query = kwargs.get('query', None) self.no_truncation = kwargs.get('no_truncation', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'AzureDataExplorerSource' @@ -3533,6 +3606,10 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -3555,6 +3632,7 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -3565,6 +3643,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -3690,6 +3769,11 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :type copy_behavior: object :param type: Required. Constant filled by server. :type type: str + :param expiry_date_time: Specifies the expiry time of the written files. + The time is applied to the UTC time zone in the format of + "2018-12-01T05:00:00Z". Default value is NULL. Type: integer (or + Expression with resultType integer). 
+ :type expiry_date_time: object """ _validation = { @@ -3701,10 +3785,12 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) + self.expiry_date_time = kwargs.get('expiry_date_time', None) self.type = 'AzureDataLakeStoreWriteSettings' @@ -3828,6 +3914,10 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -3850,6 +3940,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -3860,6 +3951,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -4194,6 +4286,11 @@ class AzureMariaDBSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -4210,6 +4307,7 @@ class AzureMariaDBSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -4834,6 +4932,11 @@ class AzureMySqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -4850,6 +4953,7 @@ class AzureMySqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5061,6 +5165,11 @@ class AzurePostgreSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5077,6 +5186,7 @@ class AzurePostgreSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5852,6 +5962,11 @@ class AzureSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -5878,6 +5993,7 @@ class AzureSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -6189,6 +6305,11 @@ class AzureTableSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). 
:type azure_table_source_query: object @@ -6209,6 +6330,7 @@ class AzureTableSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -6458,7 +6580,7 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + sub-classes are: ChainingTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when @@ -6497,7 +6619,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'ChainingTrigger': 'ChainingTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, **kwargs): @@ -6930,6 +7052,11 @@ class CassandraSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). @@ -6957,6 +7084,7 @@ class CassandraSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -7495,6 +7623,11 @@ class CommonDataServiceForAppsSource(CopySource): Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -7508,11 +7641,13 @@ class CommonDataServiceForAppsSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(CommonDataServiceForAppsSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'CommonDataServiceForAppsSource' @@ -7712,6 +7847,11 @@ class ConcurSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -7728,6 +7868,7 @@ class ConcurSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -7790,10 +7931,19 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param log_storage_settings: Log storage settings customer need to provide + when enabling session log. + :type log_storage_settings: + ~azure.mgmt.datafactory.models.LogStorageSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. :type preserve: list[object] + :param validate_data_consistency: Whether to enable Data Consistency + validation. Type: boolean (or Expression with resultType boolean). + :type validate_data_consistency: object + :param skip_error_file: Specify the fault tolerance for data consistency. + :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
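The CopyActivity docstring hunk above and the attribute-map hunk that follows add three fault-tolerance settings: `validate_data_consistency`, `skip_error_file`, and `log_storage_settings`. A minimal sketch of wiring them together; the `file_missing`/`data_inconsistency` fields on `SkipErrorFile` and the `LogStorageSettings` shape are not shown in this excerpt and are assumptions, as are all resource names:

```python
from azure.mgmt.datafactory.models import (
    BlobSink,
    BlobSource,
    CopyActivity,
    DatasetReference,
    LinkedServiceReference,
    LogStorageSettings,
    SkipErrorFile,
)

activity = CopyActivity(
    name="CopyWithConsistencyCheck",
    source=BlobSource(),
    sink=BlobSink(),
    inputs=[DatasetReference(reference_name="SrcDataset")],
    outputs=[DatasetReference(reference_name="DstDataset")],
    # Verify data between source and sink after the copy (bool or expression).
    validate_data_consistency=True,
    # Tolerate files that vanish mid-copy or fail the consistency check
    # (assumed field names, not shown in this diff excerpt).
    skip_error_file=SkipErrorFile(file_missing=True, data_inconsistency=True),
    # Where the session log for skipped files is written when logging is on.
    log_storage_settings=LogStorageSettings(
        linked_service_name=LinkedServiceReference(reference_name="LogBlobStore"),
        path="copy-session-logs",
    ),
)
```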
@@ -7825,8 +7975,11 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, + 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } @@ -7842,8 +7995,11 @@ def __init__(self, **kwargs): self.data_integration_units = kwargs.get('data_integration_units', None) self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.log_storage_settings = kwargs.get('log_storage_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) self.preserve = kwargs.get('preserve', None) + self.validate_data_consistency = kwargs.get('validate_data_consistency', None) + self.skip_error_file = kwargs.get('skip_error_file', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.type = 'Copy' @@ -8126,6 +8282,11 @@ class CosmosDbMongoDbApiSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -8142,6 +8303,7 @@ class CosmosDbMongoDbApiSource(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -8150,6 +8312,7 @@ def __init__(self, **kwargs): self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'CosmosDbMongoDbApiSource' @@ -8299,6 +8462,11 @@ class CosmosDbSqlApiSource(CopySource): :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -8314,6 +8482,7 @@ class CosmosDbSqlApiSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -8321,6 +8490,7 @@ def __init__(self, **kwargs): self.query = kwargs.get('query', None) self.page_size = kwargs.get('page_size', None) self.preferred_regions = kwargs.get('preferred_regions', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'CosmosDbSqlApiSource' @@ -8407,6 +8577,11 @@ class CouchbaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -8423,6 +8598,7 @@ class CouchbaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9925,37 +10101,47 @@ class Db2LinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: The connection string. It is mutually exclusive + with server, database, authenticationType, userName, packageCollection and + certificateCommonName property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType + string). :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). + :param database: Database name for connection. It is mutually exclusive + with connectionString property. Type: string (or Expression with + resultType string). :type database: object :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' + It is mutually exclusive with connectionString property. Possible values + include: 'Basic' :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). + :param username: Username for authentication. It is mutually exclusive + with connectionString property. Type: string (or Expression with + resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param package_collection: Under where packages are created when querying - database. Type: string (or Expression with resultType string). + database. 
It is mutually exclusive with connectionString property. Type: + string (or Expression with resultType string). :type package_collection: object :param certificate_common_name: Certificate Common Name when TLS is - enabled. Type: string (or Expression with resultType string). + enabled. It is mutually exclusive with connectionString property. Type: + string (or Expression with resultType string). :type certificate_common_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). + credential manager. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, } _attribute_map = { @@ -9965,6 +10151,7 @@ class Db2LinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, @@ -9977,6 +10164,7 @@ class Db2LinkedService(LinkedService): def __init__(self, **kwargs): super(Db2LinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.database = kwargs.get('database', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -10013,6 +10201,11 @@ class Db2Source(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -10029,6 +10222,7 @@ class Db2Source(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -10459,6 +10653,11 @@ class DelimitedTextSource(CopySource): :param format_settings: DelimitedText format settings. :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -10473,12 +10672,14 @@ class DelimitedTextSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(DelimitedTextSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'DelimitedTextSource' @@ -10739,6 +10940,11 @@ class DocumentDbCollectionSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -10754,6 +10960,7 @@ class DocumentDbCollectionSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -10761,6 +10968,7 @@ def __init__(self, **kwargs): self.query = kwargs.get('query', None) self.nesting_separator = kwargs.get('nesting_separator', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'DocumentDbCollectionSource' @@ -10846,6 +11054,11 @@ class DrillSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -10862,6 +11075,7 @@ class DrillSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -11157,6 +11371,11 @@ class DynamicsAXSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -11173,6 +11392,7 @@ class DynamicsAXSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -11463,6 +11683,11 @@ class DynamicsCrmSource(CopySource): Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -11476,11 +11701,13 @@ class DynamicsCrmSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(DynamicsCrmSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'DynamicsCrmSource' @@ -11762,6 +11989,11 @@ class DynamicsSource(CopySource): Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -11775,11 +12007,13 @@ class DynamicsSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(DynamicsSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'DynamicsSource' @@ -11947,6 +12181,11 @@ class EloquaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -11963,6 +12202,7 @@ class EloquaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12821,6 +13061,10 @@ class FileServerReadSettings(StoreReadSettings): :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). 
:type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -12843,6 +13087,7 @@ class FileServerReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -12853,6 +13098,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -13060,6 +13306,11 @@ class FileSystemSource(CopySource): recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -13073,11 +13324,13 @@ class FileSystemSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(FileSystemSource, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'FileSystemSource' @@ -13215,6 +13468,10 @@ class FtpReadSettings(StoreReadSettings): :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. 
:type use_binary_transfer: bool @@ -13231,6 +13488,7 @@ class FtpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } @@ -13239,6 +13497,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.use_binary_transfer = kwargs.get('use_binary_transfer', None) self.type = 'FtpReadSettings' @@ -13695,6 +13954,11 @@ class GoogleAdWordsSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13711,6 +13975,7 @@ class GoogleAdWordsSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13927,6 +14192,11 @@ class GoogleBigQuerySource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13943,6 +14213,7 @@ class GoogleBigQuerySource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14089,6 +14360,10 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool @@ -14112,6 +14387,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -14123,6 +14399,7 @@ def __init__(self, **kwargs): self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -14211,6 +14488,11 @@ class GreenplumSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14227,6 +14509,7 @@ class GreenplumSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14493,6 +14776,11 @@ class HBaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14509,6 +14797,7 @@ class HBaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14645,6 +14934,10 @@ class HdfsReadSettings(StoreReadSettings): :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool @@ -14669,6 +14962,7 @@ class HdfsReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -14680,6 +14974,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -15721,6 +16016,11 @@ class HiveSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15737,6 +16037,7 @@ class HiveSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16234,6 +16535,11 @@ class HubspotSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16250,6 +16556,7 @@ class HubspotSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16519,6 +16826,11 @@ class ImpalaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -16535,6 +16847,7 @@ class ImpalaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16697,6 +17010,11 @@ class InformixSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -16713,6 +17031,7 @@ class InformixSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17676,6 +17995,11 @@ class JiraSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17692,6 +18016,7 @@ class JiraSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17924,6 +18249,11 @@ class JsonSource(CopySource): :type type: str :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -17937,11 +18267,13 @@ class JsonSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(JsonSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'JsonSource' @@ -18490,6 +18822,11 @@ class MagentoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. 
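# Usage sketch for the additionalColumns property that this diff threads through
# every copy source. Assumption: the AdditionalColumns model (imported in
# models/__init__.py earlier in this diff) follows the SDK's usual name/value
# shape; the column names below are illustrative.
from azure.mgmt.datafactory.models import AdditionalColumns, JsonSource

source = JsonSource(
    additional_columns=[
        # tag each copied row with the pipeline run that produced it
        AdditionalColumns(name='copy_run_id', value='@pipeline().RunId'),
        # or attach a static literal value
        AdditionalColumns(name='ingest_source', value='adf'),
    ],
)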
Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18506,6 +18843,7 @@ class MagentoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18901,6 +19239,11 @@ class MariaDBSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18917,6 +19260,7 @@ class MariaDBSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19149,6 +19493,11 @@ class MarketoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -19165,6 +19514,7 @@ class MarketoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19326,6 +19676,11 @@ class MicrosoftAccessSource(CopySource): :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19339,11 +19694,13 @@ class MicrosoftAccessSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(MicrosoftAccessSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'MicrosoftAccessSource' @@ -19630,6 +19987,11 @@ class MongoDbSource(CopySource): :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19643,11 +20005,13 @@ class MongoDbSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(MongoDbSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'MongoDbSource' @@ -19804,6 +20168,11 @@ class MongoDbV2Source(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19820,6 +20189,7 @@ class MongoDbV2Source(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -19828,6 +20198,7 @@ def __init__(self, **kwargs): self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'MongoDbV2Source' @@ -19914,6 +20285,11 @@ class MySqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
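# Sketch of MongoDbV2Source combining the new additionalColumns with the
# existing cursor options. MongoDbCursorMethodsProperties is referenced in the
# attribute map above; its limit field is assumed from the SDK's usual
# project/sort/skip/limit shape, and the values are illustrative.
from azure.mgmt.datafactory.models import (
    AdditionalColumns, MongoDbCursorMethodsProperties, MongoDbV2Source)

source = MongoDbV2Source(
    cursor_methods=MongoDbCursorMethodsProperties(limit=1000),
    batch_size=100,
    query_timeout='02:00:00',
    additional_columns=[AdditionalColumns(name='origin', value='mongodb')],
)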
:type query: object @@ -19930,6 +20306,7 @@ class MySqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20111,6 +20488,11 @@ class NetezzaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20136,6 +20518,7 @@ class NetezzaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -20418,6 +20801,11 @@ class ODataSource(CopySource): :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -20431,11 +20819,13 @@ class ODataSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(ODataSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'ODataSource' @@ -20592,6 +20982,11 @@ class OdbcSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -20608,6 +21003,7 @@ class OdbcSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21355,6 +21751,11 @@ class OracleServiceCloudSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
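# Sketch of a range-partitioned Netezza read plus additionalColumns. The
# NetezzaPartitionSettings type comes from the attribute map above; its field
# names and the 'DynamicRange' option are assumed from the SDK's partitioning
# convention, and the column/bounds are illustrative.
from azure.mgmt.datafactory.models import (
    AdditionalColumns, NetezzaPartitionSettings, NetezzaSource)

source = NetezzaSource(
    query='SELECT * FROM sales',
    partition_option='DynamicRange',
    partition_settings=NetezzaPartitionSettings(
        partition_column_name='sale_id',
        partition_lower_bound='1',
        partition_upper_bound='1000000',
    ),
    additional_columns=[AdditionalColumns(name='region', value='emea')],
)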
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -21371,6 +21772,7 @@ class OracleServiceCloudSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21471,6 +21873,11 @@ class OracleSource(CopySource): source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21487,6 +21894,7 @@ class OracleSource(CopySource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -21495,6 +21903,7 @@ def __init__(self, **kwargs): self.query_timeout = kwargs.get('query_timeout', None) self.partition_option = kwargs.get('partition_option', None) self.partition_settings = kwargs.get('partition_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'OracleSource' @@ -21744,6 +22153,11 @@ class OrcSource(CopySource): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21757,11 +22171,13 @@ class OrcSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(OrcSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'OrcSource' @@ -21966,6 +22382,11 @@ class ParquetSource(CopySource): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21979,11 +22400,13 @@ class ParquetSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(ParquetSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'ParquetSource' @@ -22152,6 +22575,11 @@ class PaypalSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22168,6 +22596,7 @@ class PaypalSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22381,6 +22810,11 @@ class PhoenixSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22397,6 +22831,7 @@ class PhoenixSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22811,6 +23246,11 @@ class PostgreSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -22827,6 +23267,7 @@ class PostgreSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23121,6 +23562,11 @@ class PrestoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23137,6 +23583,7 @@ class PrestoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23319,6 +23766,11 @@ class QuickBooksSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23335,6 +23787,7 @@ class QuickBooksSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23503,6 +23956,11 @@ class RelationalSource(CopySource): :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -23516,11 +23974,13 @@ class RelationalSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(RelationalSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'RelationalSource' @@ -23584,48 +24044,6 @@ def __init__(self, **kwargs): self.type = 'RelationalTable' -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. 
- - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, **kwargs): - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -23650,7 +24068,7 @@ class RerunTumblingWindowTrigger(Trigger): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param parent_trigger: The parent trigger reference. + :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. @@ -23658,17 +24076,18 @@ class RerunTumblingWindowTrigger(Trigger): :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int + :param rerun_concurrency: Required. The max number of parallel time + windows (ready for execution) for which a rerun is triggered. 
+ :type rerun_concurrency: int """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, + 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { @@ -23680,7 +24099,7 @@ class RerunTumblingWindowTrigger(Trigger): 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } def __init__(self, **kwargs): @@ -23688,45 +24107,10 @@ def __init__(self, **kwargs): self.parent_trigger = kwargs.get('parent_trigger', None) self.requested_start_time = kwargs.get('requested_start_time', None) self.requested_end_time = kwargs.get('requested_end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) + self.rerun_concurrency = kwargs.get('rerun_concurrency', None) self.type = 'RerunTumblingWindowTrigger' -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - - class ResponsysLinkedService(LinkedService): """Responsys linked service. @@ -23894,6 +24278,11 @@ class ResponsysSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
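# Sketch of the reshaped rerun trigger: parentTrigger is now required and
# maxConcurrency was renamed to rerunConcurrency (1-50 per the validation
# above). parent_trigger is typed as object, so a plain reference dict is
# shown; the trigger name and UTC times are illustrative.
from datetime import datetime
from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

rerun_trigger = RerunTumblingWindowTrigger(
    parent_trigger={'referenceName': 'DailyTumblingWindow',
                    'type': 'TriggerReference'},
    requested_start_time=datetime(2020, 1, 1),
    requested_end_time=datetime(2020, 1, 8),
    rerun_concurrency=10,
)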
:type query: object @@ -23910,6 +24299,7 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24138,6 +24528,11 @@ class RestSource(CopySource): :param request_interval: The time to wait before sending the next page request. :type request_interval: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -24156,6 +24551,7 @@ 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -24166,6 +24562,7 @@ def __init__(self, **kwargs): self.pagination_rules = kwargs.get('pagination_rules', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'RestSource' @@ -24345,9 +24742,12 @@ class SalesforceLinkedService(LinkedService): :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access + :param security_token: The security token is optional when remotely accessing the Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string + (or Expression with resultType string). + :type api_version: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -24369,6 +24769,7 @@ 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -24378,6 +24779,7 @@ def __init__(self, **kwargs): self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.security_token = kwargs.get('security_token', None) + self.api_version = kwargs.get('api_version', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Salesforce'
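# Sketch of a Salesforce linked service using the new apiVersion property and
# the now-optional security token. SecureString is this SDK's inline SecretBase
# implementation; the URL, user name and version number are placeholders.
from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

linked_service = SalesforceLinkedService(
    environment_url='https://login.salesforce.com',
    username='copy-user@example.com',
    password=SecureString(value='<password>'),
    # security_token can now be omitted for orgs that do not require it
    api_version='47.0',
)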
@@ -24546,6 +24948,11 @@ class SalesforceMarketingCloudSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24562,6 +24969,7 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24664,9 +25072,12 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access + :param security_token: The security token is optional when remotely accessing the Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string + (or Expression with resultType string). + :type api_version: object :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). :type extended_properties: object @@ -24691,6 +25102,7 @@ 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -24701,6 +25113,7 @@ def __init__(self, **kwargs): self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.security_token = kwargs.get('security_token', None) + self.api_version = kwargs.get('api_version', None) self.extended_properties = kwargs.get('extended_properties', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SalesforceServiceCloud' @@ -24866,6 +25279,11 @@ class SalesforceServiceCloudSource(CopySource): Query. Possible values include: 'Query', 'QueryAll' :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects).
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -24880,12 +25298,14 @@ class SalesforceServiceCloudSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(SalesforceServiceCloudSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) self.read_behavior = kwargs.get('read_behavior', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'SalesforceServiceCloudSource' @@ -24986,6 +25406,11 @@ class SalesforceSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -25006,6 +25431,7 @@ class SalesforceSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } @@ -25173,6 +25599,11 @@ class SapBwSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: MDX query. Type: string (or Expression with resultType string). :type query: object @@ -25189,6 +25620,7 @@ class SapBwSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25403,6 +25835,11 @@ class SapCloudForCustomerSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). 
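# Sketch of a Salesforce source combining readBehavior with the new
# additionalColumns; per the docstring above, 'QueryAll' also returns
# soft-deleted records, while 'Query' does not.
from azure.mgmt.datafactory.models import AdditionalColumns, SalesforceSource

source = SalesforceSource(
    query='SELECT Id, Name FROM Account',
    read_behavior='QueryAll',
    additional_columns=[AdditionalColumns(name='crm', value='salesforce')],
)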
:type query: object @@ -25419,6 +25856,7 @@ class SapCloudForCustomerSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25578,6 +26016,11 @@ class SapEccSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -25594,6 +26037,7 @@ class SapEccSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25627,8 +26071,8 @@ class SapHanaLinkedService(LinkedService): :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). + :param server: Host name of the SAP HANA server. Type: string (or + Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. Possible values include: 'Basic', 'Windows' @@ -25647,7 +26091,6 @@ class SapHanaLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -25719,6 +26162,11 @@ class SapHanaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object @@ -25747,6 +26195,7 @@ class SapHanaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, @@ -25940,6 +26389,11 @@ class SapOpenHubSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
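# With 'server' no longer required on SapHanaLinkedService (see the validation
# change above), the service can be defined from a connection string alone.
# The DSN below is illustrative.
from azure.mgmt.datafactory.models import SapHanaLinkedService

linked_service = SapHanaLinkedService(
    connection_string='SERVERNODE=myhana.internal:30015;UID=copy_user',
)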
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). :type exclude_last_request: object @@ -25962,6 +26416,7 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } @@ -26297,6 +26752,11 @@ class SapTableSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param row_count: The number of rows to be retrieved. Type: integer (or Expression with resultType integer). :type row_count: object @@ -26342,6 +26802,7 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -27032,6 +27493,11 @@ class ServiceNowSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27048,6 +27514,7 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27162,6 +27629,10 @@ class SftpReadSettings(StoreReadSettings): :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param modified_datetime_start: The start of the file's modified datetime. Type: string (or Expression with resultType string).
:type modified_datetime_start: object @@ -27181,6 +27652,7 @@ 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -27190,6 +27662,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.type = 'SftpReadSettings' @@ -27322,6 +27795,10 @@ class SftpWriteSettings(StoreWriteSettings): SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). :type operation_timeout: object + :param use_temp_file_rename: Upload to temporary file(s) and rename. + Disable this option if your SFTP server doesn't support the rename + operation. Type: boolean (or Expression with resultType boolean). + :type use_temp_file_rename: object """ _validation = { @@ -27334,11 +27811,13 @@ 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, } def __init__(self, **kwargs): super(SftpWriteSettings, self).__init__(**kwargs) self.operation_timeout = kwargs.get('operation_timeout', None) + self.use_temp_file_rename = kwargs.get('use_temp_file_rename', None) self.type = 'SftpWriteSettings' @@ -27501,6 +27980,11 @@ class ShopifySource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27517,6 +28001,7 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27526,6 +28011,29 @@ def __init__(self, **kwargs): self.type = 'ShopifySource' +class SkipErrorFile(Model): + """Skip error file. + + :param file_missing: Skip if the file is deleted by another client during + copy. Default is true. Type: boolean (or Expression with resultType boolean). + :type file_missing: object + :param data_inconsistency: Skip if the source/sink file is changed by another + concurrent write. Default is false. Type: boolean (or Expression with + resultType boolean).
+ :type data_inconsistency: object + """ + + _attribute_map = { + 'file_missing': {'key': 'fileMissing', 'type': 'object'}, + 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SkipErrorFile, self).__init__(**kwargs) + self.file_missing = kwargs.get('file_missing', None) + self.data_inconsistency = kwargs.get('data_inconsistency', None) + + class SparkLinkedService(LinkedService): """Spark Server linked service. @@ -27740,6 +28248,11 @@ class SparkSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27756,6 +28269,7 @@ class SparkSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27872,6 +28386,11 @@ class SqlDWSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -27897,6 +28416,7 @@ class SqlDWSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, @@ -28016,6 +28536,11 @@ class SqlMISource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
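# Sketch of the two write-side additions completed above: SkipErrorFile for
# copy fault tolerance, and useTempFileRename on SftpWriteSettings for servers
# that reject rename. Defaults are spelled out for illustration.
from azure.mgmt.datafactory.models import SftpWriteSettings, SkipErrorFile

skip_error_file = SkipErrorFile(
    file_missing=True,         # tolerate files deleted by another client mid-copy
    data_inconsistency=False,  # fail if a concurrent write changes source/sink
)

write_settings = SftpWriteSettings(
    operation_timeout='01:00:00',
    use_temp_file_rename=False,  # upload directly instead of temp-file-then-rename
)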
:type sql_reader_query: object @@ -28042,6 +28567,7 @@ class SqlMISource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -28225,6 +28751,11 @@ class SqlServerSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -28251,6 +28782,7 @@ class SqlServerSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -28500,6 +29032,11 @@ class SqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -28511,6 +29048,12 @@ class SqlSource(TabularSource): procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param isolation_level: Specifies the transaction locking behavior for the + SQL source. Allowed values: + ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The + default value is ReadCommitted. Type: string (or Expression with + resultType string). 
+ :type isolation_level: object """ _validation = { @@ -28524,9 +29067,11 @@ class SqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, } def __init__(self, **kwargs): @@ -28534,6 +29079,7 @@ def __init__(self, **kwargs): self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.isolation_level = kwargs.get('isolation_level', None) self.type = 'SqlSource' @@ -28708,6 +29254,11 @@ class SquareSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -28724,6 +29275,7 @@ class SquareSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29617,6 +30169,11 @@ class SybaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -29633,6 +30190,7 @@ class SybaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29829,6 +30387,11 @@ class TeradataSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). 
:type query: object @@ -29854,6 +30417,7 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, @@ -30053,6 +30617,28 @@ def __init__(self, **kwargs): self.type = 'TriggerDependencyReference' +class TriggerFilterParameters(Model): + """Query parameters for triggers. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger + to get the child rerun triggers. + :type parent_trigger_name: str + """ + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerFilterParameters, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.parent_trigger_name = kwargs.get('parent_trigger_name', None) + + class TriggerPipelineReference(Model): """Pipeline that needs to be triggered with the given parameters. @@ -30073,6 +30659,33 @@ def __init__(self, **kwargs): self.parameters = kwargs.get('parameters', None) +class TriggerQueryResponse(Model): + """A query of triggers. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) + + class TriggerReference(Model): """Trigger reference type. @@ -30770,6 +31383,11 @@ class VerticaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string).
:type query: object @@ -30786,6 +31404,7 @@ class VerticaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31345,6 +31964,11 @@ class WebSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -31357,10 +31981,12 @@ class WebSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(WebSource, self).__init__(**kwargs) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'WebSource' @@ -31596,6 +32222,11 @@ class XeroSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31612,6 +32243,7 @@ class XeroSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31779,6 +32411,11 @@ class ZohoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -31795,6 +32432,7 @@ class ZohoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 8c7c24c27e52..b39487b10c4f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -302,6 +302,28 @@ def __init__(self, *, job_version: str=None, **kwargs) -> None: self.job_version = job_version +class AdditionalColumns(Model): + """Specify the column name and value of additional columns. + + :param name: Additional column name. Type: string (or Expression with + resultType string). + :type name: object + :param value: Additional column value. Type: string (or Expression with + resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, name=None, value=None, **kwargs) -> None: + super(AdditionalColumns, self).__init__(**kwargs) + self.name = name + self.value = value + + class LinkedService(Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute @@ -748,6 +770,11 @@ class TabularSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -761,15 +788,17 @@ class TabularSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } _subtype_map = { 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'CassandraSource': 'CassandraSource', 'TeradataSource': 'TeradataSource', 'AzureMySqlSource': 'AzureMySqlSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'InformixSource': 'InformixSource', 'AzureTableSource': 'AzureTableSource'} } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'TabularSource' @@ -798,6 +827,11 @@ class AmazonMWSSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. 
Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -814,11 +848,12 @@ class AmazonMWSSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AmazonMWSSource' @@ -922,6 +957,11 @@ class AmazonRedshiftSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -944,12 +984,13 @@ class AmazonRedshiftSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.redshift_unload_settings = redshift_unload_settings self.type = 'AmazonRedshiftSource' @@ -1347,6 +1388,10 @@ class AmazonS3ReadSettings(StoreReadSettings): :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool @@ -1370,17 +1415,19 @@ class AmazonS3ReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -1784,6 +1831,11 @@ class AvroSource(CopySource): :type type: str :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -1797,11 +1849,13 @@ class AvroSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.additional_columns = additional_columns self.type = 'AvroSource' @@ -2249,6 +2303,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. 
Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -2271,16 +2329,18 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -2638,6 +2698,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool @@ -2661,17 +2725,19 @@ class AzureBlobStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -3142,6 +3208,11 @@ class AzureDataExplorerSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).. :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -3158,13 +3229,15 @@ class AzureDataExplorerSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.no_truncation = no_truncation self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'AzureDataExplorerSource' @@ -3533,6 +3606,10 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string). 
:type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -3555,16 +3632,18 @@ 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -3690,6 +3769,11 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :type copy_behavior: object :param type: Required. Constant filled by server. :type type: str + :param expiry_date_time: Specifies the expiry time of the written files. + The time is applied to the UTC time zone in the format of + "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or + Expression with resultType string). + :type expiry_date_time: object """ _validation = { @@ -3701,10 +3785,12 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, expiry_date_time=None, **kwargs) -> None: super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.expiry_date_time = expiry_date_time self.type = 'AzureDataLakeStoreWriteSettings' @@ -3828,6 +3914,10 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param wildcard_file_name: Azure File Storage wildcardFileName.
Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -3850,16 +3940,18 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -4194,6 +4286,11 @@ class AzureMariaDBSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -4210,11 +4307,12 @@ class AzureMariaDBSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AzureMariaDBSource' @@ -4834,6 +4932,11 @@ class AzureMySqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -4850,11 +4953,12 @@ class AzureMySqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AzureMySqlSource' @@ -5061,6 +5165,11 @@ class AzurePostgreSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5077,11 +5186,12 @@ class AzurePostgreSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AzurePostgreSqlSource' @@ -5852,6 +5962,11 @@ class AzureSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -5878,14 +5993,15 @@ class AzureSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -6189,6 +6305,11 @@ class AzureTableSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). 
:type azure_table_source_query: object @@ -6209,12 +6330,13 @@ class AzureTableSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found self.type = 'AzureTableSource' @@ -6458,7 +6580,7 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + sub-classes are: ChainingTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when @@ -6497,7 +6619,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'ChainingTrigger': 'ChainingTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: @@ -6930,6 +7052,11 @@ class CassandraSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). 
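Every tabular source in this stretch of the diff gains the same `additional_columns` list via the shared TabularSource base. A minimal usage sketch, assuming these generated models are consumed directly; the keyspace, query, and column values below are hypothetical:

from azure.mgmt.datafactory.models import AdditionalColumns, CassandraSource

# Tag each row copied from Cassandra with a static lineage column; both the
# column name and value may also be Data Factory expression objects.
source = CassandraSource(
    query="SELECT * FROM ks.events",
    consistency_level="ONE",
    additional_columns=[
        AdditionalColumns(name="ingest_source", value="cassandra"),
    ],
)

The keyword threads through super().__init__ on every TabularSource subtype touched above, so AzureSqlSource, TeradataSource, and the rest accept it identically.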
@@ -6957,12 +7084,13 @@ class CassandraSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, consistency_level=None, **kwargs) -> None: - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.consistency_level = consistency_level self.type = 'CassandraSource' @@ -7495,6 +7623,11 @@ class CommonDataServiceForAppsSource(CopySource): Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -7508,11 +7641,13 @@ class CommonDataServiceForAppsSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'CommonDataServiceForAppsSource' @@ -7712,6 +7847,11 @@ class ConcurSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). :type query: object @@ -7728,11 +7868,12 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ConcurSource' @@ -7790,10 +7931,19 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param log_storage_settings: Log storage settings the customer needs to + provide when enabling the session log. + :type log_storage_settings: + ~azure.mgmt.datafactory.models.LogStorageSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. :type preserve: list[object] + :param validate_data_consistency: Whether to enable Data Consistency + validation. Type: boolean (or Expression with resultType boolean). + :type validate_data_consistency: object + :param skip_error_file: Specifies the fault tolerance for data consistency. + :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity.
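CopyActivity's new fault-tolerance surface (session log, consistency validation, per-file skip rules) composes as follows. A sketch assuming the models land as generated here; the activity name and the "LogBlobStorage" linked-service reference are hypothetical:

from azure.mgmt.datafactory.models import (
    BlobSink,
    BlobSource,
    CopyActivity,
    LinkedServiceReference,
    LogStorageSettings,
    SkipErrorFile,
)

copy = CopyActivity(
    name="CopyWithConsistencyChecks",
    source=BlobSource(),
    sink=BlobSink(),
    # Verify source/sink consistency after the copy instead of trusting it.
    validate_data_consistency=True,
    # Tolerate files that disappear or change mid-copy rather than failing.
    skip_error_file=SkipErrorFile(file_missing=True, data_inconsistency=True),
    # Where the session log (including any skipped files) is persisted.
    log_storage_settings=LogStorageSettings(
        linked_service_name=LinkedServiceReference(reference_name="LogBlobStorage"),
    ),
)

Per the SkipErrorFile docstring earlier in this diff, file_missing defaults to true and data_inconsistency to false, so only the latter strictly needs to be set here.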
@@ -7825,13 +7975,16 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, + 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, log_storage_settings=None, preserve_rules=None, preserve=None, validate_data_consistency=None, skip_error_file=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -7842,8 +7995,11 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.data_integration_units = data_integration_units self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.log_storage_settings = log_storage_settings self.preserve_rules = preserve_rules self.preserve = preserve + self.validate_data_consistency = validate_data_consistency + self.skip_error_file = skip_error_file self.inputs = inputs self.outputs = outputs self.type = 'Copy' @@ -8126,6 +8282,11 @@ class CosmosDbMongoDbApiSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -8142,14 +8303,16 @@ class CosmosDbMongoDbApiSource(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'CosmosDbMongoDbApiSource' @@ -8299,6 +8462,11 @@ class CosmosDbSqlApiSource(CopySource): :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -8314,13 +8482,15 @@ class CosmosDbSqlApiSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, page_size=None, preferred_regions=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, page_size=None, preferred_regions=None, additional_columns=None, **kwargs) -> None: super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.page_size = page_size self.preferred_regions = preferred_regions + self.additional_columns = additional_columns self.type = 'CosmosDbSqlApiSource' @@ -8407,6 +8577,11 @@ class CouchbaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
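additionalColumns is the recurring addition in this change set; every copy source gains it, here the two Cosmos DB sources. A sketch assuming AdditionalColumns is a name/value pair (its own fields do not appear in this diff), where value may be a static string or an expression object:

```python
from azure.mgmt.datafactory.models import AdditionalColumns, CosmosDbSqlApiSource

source = CosmosDbSqlApiSource(
    query="select * from c",
    preferred_regions=["West US 2"],
    additional_columns=[
        # Static value stamped onto every copied row (name/value assumed)
        AdditionalColumns(name="ingest_region", value="westus2"),
        # value is typed as object, so an ADF expression should also fit
        AdditionalColumns(name="loaded_at", value={"value": "@utcnow()", "type": "Expression"}),
    ],
)
```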
:type query: object @@ -8423,11 +8598,12 @@ class CouchbaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'CouchbaseSource' @@ -9925,37 +10101,47 @@ class Db2LinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: The connection string. It is mutually exclusive + with server, database, authenticationType, userName, packageCollection and + certificateCommonName properties. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType + string). :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). + :param database: Database name for connection. It is mutually exclusive + with connectionString property. Type: string (or Expression with + resultType string). :type database: object :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' + It is mutually exclusive with connectionString property. Possible values + include: 'Basic' :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). + :param username: Username for authentication. It is mutually exclusive + with connectionString property. Type: string (or Expression with + resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param package_collection: Under where packages are created when querying - database. Type: string (or Expression with resultType string). + database. It is mutually exclusive with connectionString property. Type: + string (or Expression with resultType string). :type package_collection: object :param certificate_common_name: Certificate Common Name when TLS is - enabled.
It is mutually exclusive with connectionString property. Type: + string (or Expression with resultType string). :type certificate_common_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). + credential manager. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, } _attribute_map = { @@ -9965,6 +10151,7 @@ class Db2LinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, @@ -9975,8 +10162,9 @@ class Db2LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, package_collection=None, certificate_common_name=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, database=None, authentication_type=None, username=None, password=None, package_collection=None, certificate_common_name=None, encrypted_credential=None, **kwargs) -> None: super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.database = database self.authentication_type = authentication_type @@ -10013,6 +10201,11 @@ class Db2Source(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
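The Db2LinkedService hunk drops the Required markers on server/database and introduces connectionString as a mutually exclusive alternative to the discrete properties. A sketch of the two styles; per the docstrings, only one group should be populated on a given instance:

```python
from azure.mgmt.datafactory.models import Db2LinkedService, SecureString

# New style: a single connection string, mutually exclusive with the
# server/database/authenticationType/userName properties.
db2_by_connection_string = Db2LinkedService(
    connection_string="Server=myserver;Database=mydb;AuthenticationType=Basic;UserName=admin",
)

# Existing style: discrete properties, no longer enforced as required.
db2_by_properties = Db2LinkedService(
    server="myserver",
    database="mydb",
    authentication_type="Basic",
    username="admin",
    password=SecureString(value="<password>"),
)
```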
:type query: object @@ -10029,11 +10222,12 @@ class Db2Source(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'Db2Source' @@ -10459,6 +10653,11 @@ class DelimitedTextSource(CopySource): :param format_settings: DelimitedText format settings. :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -10473,12 +10672,14 @@ class DelimitedTextSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, additional_columns=None, **kwargs) -> None: super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings + self.additional_columns = additional_columns self.type = 'DelimitedTextSource' @@ -10739,6 +10940,11 @@ class DocumentDbCollectionSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
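File-format sources take the same parameter; DelimitedTextSource pairs it with the store and format settings. A sketch reusing the assumed name/value shape, with $$FILEPATH as the service's reserved variable for the source file path:

```python
from azure.mgmt.datafactory.models import (
    AdditionalColumns, DelimitedTextReadSettings, DelimitedTextSource,
    FileServerReadSettings,
)

source = DelimitedTextSource(
    store_settings=FileServerReadSettings(recursive=True),
    format_settings=DelimitedTextReadSettings(),
    additional_columns=[
        # Stamp each row with the file it came from (name/value assumed)
        AdditionalColumns(name="source_file", value="$$FILEPATH"),
    ],
)
```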
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -10754,13 +10960,15 @@ class DocumentDbCollectionSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.nesting_separator = nesting_separator self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'DocumentDbCollectionSource' @@ -10846,6 +11054,11 @@ class DrillSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -10862,11 +11075,12 @@ class DrillSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'DrillSource' @@ -11157,6 +11371,11 @@ class DynamicsAXSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -11173,11 +11392,12 @@ class DynamicsAXSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'DynamicsAXSource' @@ -11463,6 +11683,11 @@ class DynamicsCrmSource(CopySource): Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -11476,11 +11701,13 @@ class DynamicsCrmSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'DynamicsCrmSource' @@ -11762,6 +11989,11 @@ class DynamicsSource(CopySource): Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -11775,11 +12007,13 @@ class DynamicsSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'DynamicsSource' @@ -11947,6 +12181,11 @@ class EloquaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -11963,11 +12202,12 @@ class EloquaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'EloquaSource' @@ -12821,6 +13061,10 @@ class FileServerReadSettings(StoreReadSettings): :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool @@ -12843,16 +13087,18 @@ class FileServerReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -13060,6 +13306,11 @@ class FileSystemSource(CopySource): recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -13073,11 +13324,13 @@ class FileSystemSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, additional_columns=None, **kwargs) -> None: super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive + self.additional_columns = additional_columns self.type = 'FileSystemSource' @@ -13215,6 +13468,10 @@ class FtpReadSettings(StoreReadSettings): :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). 
+ :type file_list_path: object :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. :type use_binary_transfer: bool @@ -13231,14 +13488,16 @@ class FtpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, use_binary_transfer: bool=None, **kwargs) -> None: super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.use_binary_transfer = use_binary_transfer self.type = 'FtpReadSettings' @@ -13695,6 +13954,11 @@ class GoogleAdWordsSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13711,11 +13975,12 @@ class GoogleAdWordsSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'GoogleAdWordsSource' @@ -13927,6 +14192,11 @@ class GoogleBigQuerySource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
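fileListPath is added uniformly to the store read settings in this diff (file server, FTP, Google Cloud Storage, HDFS): instead of expanding wildcards, the copy reads a text file that lists dataset-relative paths, one per line. A sketch for the FTP variant, whose parameters all appear in the hunk above:

```python
from azure.mgmt.datafactory.models import FtpReadSettings

read_settings = FtpReadSettings(
    # Dataset-relative path to a text file enumerating the files to copy
    file_list_path="control/files-to-copy.txt",
    use_binary_transfer=True,
)
```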
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13943,11 +14213,12 @@ class GoogleBigQuerySource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'GoogleBigQuerySource' @@ -14089,6 +14360,10 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool @@ -14112,17 +14387,19 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -14211,6 +14488,11 @@ class GreenplumSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -14227,11 +14509,12 @@ class GreenplumSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'GreenplumSource' @@ -14493,6 +14776,11 @@ class HBaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14509,11 +14797,12 @@ class HBaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'HBaseSource' @@ -14645,6 +14934,10 @@ class HdfsReadSettings(StoreReadSettings): :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). 
:type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -14669,17 +14962,19 @@ class HdfsReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -15721,6 +16016,11 @@ class HiveSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -15737,11 +16037,12 @@ class HiveSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'HiveSource' @@ -16234,6 +16535,11 @@ class HubspotSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16250,11 +16556,12 @@ class HubspotSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'HubspotSource' @@ -16519,6 +16826,11 @@ class ImpalaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
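For TabularSource subclasses such as HiveSource, the new parameter sits next to queryTimeout, whose documented pattern ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])) is a d.hh:mm:ss timespan. A sketch, with the AdditionalColumns fields assumed as above:

```python
from azure.mgmt.datafactory.models import AdditionalColumns, HiveSource

source = HiveSource(
    query="SELECT * FROM web_logs",
    query_timeout="02:00:00",  # hh:mm:ss, matching the documented pattern
    additional_columns=[AdditionalColumns(name="source_system", value="hive")],
)
```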
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16535,11 +16847,12 @@ class ImpalaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ImpalaSource' @@ -16697,6 +17010,11 @@ class InformixSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -16713,11 +17031,12 @@ class InformixSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'InformixSource' @@ -17676,6 +17995,11 @@ class JiraSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17692,11 +18016,12 @@ class JiraSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'JiraSource' @@ -17924,6 +18249,11 @@ class JsonSource(CopySource): :type type: str :param store_settings: Json store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -17937,11 +18267,13 @@ class JsonSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.additional_columns = additional_columns self.type = 'JsonSource' @@ -18490,6 +18822,11 @@ class MagentoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18506,11 +18843,12 @@ class MagentoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MagentoSource' @@ -18901,6 +19239,11 @@ class MariaDBSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18917,11 +19260,12 @@ class MariaDBSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MariaDBSource' @@ -19149,6 +19493,11 @@ class MarketoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -19165,11 +19514,12 @@ class MarketoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MarketoSource' @@ -19326,6 +19676,11 @@ class MicrosoftAccessSource(CopySource): :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19339,11 +19694,13 @@ class MicrosoftAccessSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'MicrosoftAccessSource' @@ -19630,6 +19987,11 @@ class MongoDbSource(CopySource): :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19643,11 +20005,13 @@ class MongoDbSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'MongoDbSource' @@ -19804,6 +20168,11 @@ class MongoDbV2Source(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19820,14 +20189,16 @@ class MongoDbV2Source(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'MongoDbV2Source' @@ -19914,6 +20285,11 @@ class MySqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
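Sources that derive from `CopySource` rather than `TabularSource` (`MicrosoftAccessSource`, `MongoDbSource`, and `MongoDbV2Source` above) cannot delegate to a base-class keyword, so they assign `self.additional_columns` directly. A sketch with `MongoDbV2Source`; the filter document is an illustrative value, as that parameter is typed as a plain object.

```python
from azure.mgmt.datafactory.models import AdditionalColumns, MongoDbV2Source

source = MongoDbV2Source(
    filter={"status": "active"},   # illustrative MongoDB filter document
    batch_size=500,
    query_timeout="02:00:00",
    additional_columns=[AdditionalColumns(name="source_system", value="mongo")],
)
```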
:type query: object @@ -19930,11 +20306,12 @@ class MySqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MySqlSource' @@ -20111,6 +20488,11 @@ class NetezzaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -20136,13 +20518,14 @@ class NetezzaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.partition_option = partition_option self.partition_settings = partition_settings @@ -20418,6 +20801,11 @@ class ODataSource(CopySource): :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -20431,11 +20819,13 @@ class ODataSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'ODataSource' @@ -20592,6 +20982,11 @@ class OdbcSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
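For partitioned tabular sources such as `NetezzaSource`, the new keyword slots in between `query_timeout` and `query`, leaving the partition arguments untouched. A sketch under the assumption that `NetezzaPartitionSettings` exposes column/bound fields for range partitioning and that `'DynamicRange'` is a valid `partition_option` value; neither detail is shown in this hunk.

```python
from azure.mgmt.datafactory.models import (
    AdditionalColumns,
    NetezzaPartitionSettings,
    NetezzaSource,
)

source = NetezzaSource(
    query="SELECT * FROM sales",
    partition_option="DynamicRange",              # assumed option name
    partition_settings=NetezzaPartitionSettings(  # assumed field names
        partition_column_name="sale_id",
        partition_lower_bound="1",
        partition_upper_bound="1000000",
    ),
    additional_columns=[AdditionalColumns(name="batch_id", value="2020-03")],
)
```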
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -20608,11 +21003,12 @@ class OdbcSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'OdbcSource' @@ -21355,6 +21751,11 @@ class OracleServiceCloudSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -21371,11 +21772,12 @@ class OracleServiceCloudSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'OracleServiceCloudSource' @@ -21471,6 +21873,11 @@ class OracleSource(CopySource): source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21487,14 +21894,16 @@ class OracleSource(CopySource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, additional_columns=None, **kwargs) -> None: super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout self.partition_option = partition_option self.partition_settings = partition_settings + self.additional_columns = additional_columns self.type = 'OracleSource' @@ -21744,6 +22153,11 @@ class OrcSource(CopySource): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
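`OracleSource` is `CopySource`-derived but already carried `query_timeout` and partitioning itself, so `additional_columns` is simply appended to its keyword list and assigned on the instance. A minimal sketch:

```python
from azure.mgmt.datafactory.models import AdditionalColumns, OracleSource

source = OracleSource(
    oracle_reader_query="SELECT * FROM hr.employees",
    query_timeout="01:00:00",  # must match the documented timespan pattern
    additional_columns=[AdditionalColumns(name="loaded_by", value="adf")],
)
```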
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21757,11 +22171,13 @@ class OrcSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.additional_columns = additional_columns self.type = 'OrcSource' @@ -21966,6 +22382,11 @@ class ParquetSource(CopySource): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21979,11 +22400,13 @@ class ParquetSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.additional_columns = additional_columns self.type = 'ParquetSource' @@ -22152,6 +22575,11 @@ class PaypalSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
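The format-based sources `OrcSource` and `ParquetSource` pair the new keyword with their existing `store_settings`. A sketch using `AzureBlobStorageReadSettings` as the `StoreReadSettings` implementation; that class and its wildcard fields are assumed from elsewhere in the SDK, not from these hunks.

```python
from azure.mgmt.datafactory.models import (
    AdditionalColumns,
    AzureBlobStorageReadSettings,  # assumed StoreReadSettings implementation
    ParquetSource,
)

source = ParquetSource(
    store_settings=AzureBlobStorageReadSettings(
        recursive=True,
        wildcard_file_name="*.parquet",
    ),
    additional_columns=[AdditionalColumns(name="file_batch", value="nightly")],
)
```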
:type query: object @@ -22168,11 +22596,12 @@ class PaypalSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'PaypalSource' @@ -22381,6 +22810,11 @@ class PhoenixSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22397,11 +22831,12 @@ class PhoenixSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'PhoenixSource' @@ -22811,6 +23246,11 @@ class PostgreSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -22827,11 +23267,12 @@ class PostgreSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'PostgreSqlSource' @@ -23121,6 +23562,11 @@ class PrestoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -23137,11 +23583,12 @@ class PrestoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'PrestoSource' @@ -23319,6 +23766,11 @@ class QuickBooksSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23335,11 +23787,12 @@ class QuickBooksSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'QuickBooksSource' @@ -23503,6 +23956,11 @@ class RelationalSource(CopySource): :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -23516,11 +23974,13 @@ class RelationalSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'RelationalSource' @@ -23584,48 +24044,6 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'RelationalTable' -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = properties - - class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -23650,7 +24068,7 @@ class RerunTumblingWindowTrigger(Trigger): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param parent_trigger: The parent trigger reference. + :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. @@ -23658,17 +24076,18 @@ class RerunTumblingWindowTrigger(Trigger): :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. 
:type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int + :param rerun_concurrency: Required. The max number of parallel time + windows (ready for execution) for which a rerun is triggered. + :type rerun_concurrency: int """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, + 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { @@ -23680,53 +24099,18 @@ class RerunTumblingWindowTrigger(Trigger): 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + def __init__(self, *, parent_trigger, requested_start_time, requested_end_time, rerun_concurrency: int, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency + self.rerun_concurrency = rerun_concurrency self.type = 'RerunTumblingWindowTrigger' -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.max_concurrency = max_concurrency - - class ResponsysLinkedService(LinkedService): """Responsys linked service. 
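With `RerunTriggerResource` and `RerunTumblingWindowTriggerActionParameters` deleted, a rerun is now expressed through `RerunTumblingWindowTrigger` alone: `parent_trigger` becomes required, and `max_concurrency` is renamed `rerun_concurrency` (same 1-50 bounds, new `rerunConcurrency` wire key), so callers still passing `max_concurrency=` will hit a `TypeError`. A minimal sketch; the dict shape for `parent_trigger` is an assumption, since the property is typed as a plain object.

```python
from datetime import datetime
from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

rerun = RerunTumblingWindowTrigger(
    # Assumed TriggerReference-style payload; parent_trigger is typed as object.
    parent_trigger={"referenceName": "DailyWindowTrigger",
                    "type": "TriggerReference"},
    requested_start_time=datetime(2020, 3, 1),  # UTC only
    requested_end_time=datetime(2020, 3, 8),    # UTC only
    rerun_concurrency=5,                        # formerly max_concurrency; 1..50
)
```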
@@ -23894,6 +24278,11 @@ class ResponsysSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23910,11 +24299,12 @@ class ResponsysSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ResponsysSource' @@ -24138,6 +24528,11 @@ class RestSource(CopySource): :param request_interval: The time to await before sending next page request. :type request_interval: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -24156,9 +24551,10 @@ class RestSource(CopySource): 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, additional_columns=None, **kwargs) -> None: super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.request_body = request_body @@ -24166,6 +24562,7 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.pagination_rules = pagination_rules self.http_request_timeout = http_request_timeout self.request_interval = request_interval + self.additional_columns = additional_columns self.type = 'RestSource' @@ -24345,9 +24742,12 @@ class SalesforceLinkedService(LinkedService): :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string + (or Expression with resultType string). + :type api_version: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
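`RestSource` gets the same treatment, with `additional_columns` appended after its paging controls. A sketch with illustrative header and interval values; these properties are all typed as plain objects, so the exact formats here are assumptions.

```python
from azure.mgmt.datafactory.models import AdditionalColumns, RestSource

source = RestSource(
    request_method="GET",
    additional_headers="x-correlation-id: {id}",  # illustrative header string
    http_request_timeout="00:01:40",
    request_interval=100,                         # illustrative pause value
    additional_columns=[AdditionalColumns(name="api_page", value="1")],
)
```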
@@ -24369,15 +24769,17 @@ class SalesforceLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, api_version=None, encrypted_credential=None, **kwargs) -> None: super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.environment_url = environment_url self.username = username self.password = password self.security_token = security_token + self.api_version = api_version self.encrypted_credential = encrypted_credential self.type = 'Salesforce' @@ -24546,6 +24948,11 @@ class SalesforceMarketingCloudSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
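For `SalesforceLinkedService`, the docstring change above relaxes `security_token` from required to optional, and the new `api_version` property (wire key `typeProperties.apiVersion`) pins the Salesforce API version ADF uses. A minimal sketch with placeholder credentials:

```python
from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

ls = SalesforceLinkedService(
    environment_url="https://login.salesforce.com",
    username="adf-user@contoso.com",
    password=SecureString(value="<password>"),
    security_token=SecureString(value="<token>"),  # now optional; may be omitted
    api_version="47.0",                            # illustrative version string
)
```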
:type query: object @@ -24562,11 +24969,12 @@ class SalesforceMarketingCloudSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SalesforceMarketingCloudSource' @@ -24664,9 +25072,12 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string + (or Expression with resultType string). + :type api_version: object :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). 
:type extended_properties: object @@ -24691,16 +25102,18 @@ class SalesforceServiceCloudLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, api_version=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.environment_url = environment_url self.username = username self.password = password self.security_token = security_token + self.api_version = api_version self.extended_properties = extended_properties self.encrypted_credential = encrypted_credential self.type = 'SalesforceServiceCloud' @@ -24866,6 +25279,11 @@ class SalesforceServiceCloudSource(CopySource): Query. Possible values include: 'Query', 'QueryAll' :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -24880,12 +25298,14 @@ class SalesforceServiceCloudSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, additional_columns=None, **kwargs) -> None: super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.read_behavior = read_behavior + self.additional_columns = additional_columns self.type = 'SalesforceServiceCloudSource' @@ -24986,6 +25406,11 @@ class SalesforceSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. 
Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -25006,12 +25431,13 @@ class SalesforceSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.read_behavior = read_behavior self.type = 'SalesforceSource' @@ -25173,6 +25599,11 @@ class SapBwSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: MDX query. Type: string (or Expression with resultType string). 
:type query: object @@ -25189,11 +25620,12 @@ class SapBwSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SapBwSource' @@ -25403,6 +25835,11 @@ class SapCloudForCustomerSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -25419,11 +25856,12 @@ class SapCloudForCustomerSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SapCloudForCustomerSource' @@ -25578,6 +26016,11 @@ class SapEccSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -25594,11 +26037,12 @@ class SapEccSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SapEccSource' @@ -25627,8 +26071,8 @@ class SapHanaLinkedService(LinkedService): :param connection_string: SAP HANA ODBC connection string. Type: string, SecureString or AzureKeyVaultSecretReference. :type connection_string: object - :param server: Required. Host name of the SAP HANA server. Type: string - (or Expression with resultType string). + :param server: Host name of the SAP HANA server. Type: string (or + Expression with resultType string). :type server: object :param authentication_type: The authentication type to be used to connect to the SAP HANA server. 
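`SapHanaLinkedService` relaxes `server` from required to optional (the validation entry and the required keyword are dropped in the constructor just below), which permits a connection described entirely by `connection_string`. A minimal sketch with placeholder values:

```python
from azure.mgmt.datafactory.models import SapHanaLinkedService, SecureString

ls = SapHanaLinkedService(
    # server= may now be omitted when connection_string carries the host.
    connection_string="SERVERNODE=hana.contoso.com:30015",  # illustrative
    authentication_type="Basic",
    user_name="adf",
    password=SecureString(value="<password>"),
)
```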
Possible values include: 'Basic', 'Windows' @@ -25647,7 +26091,6 @@ class SapHanaLinkedService(LinkedService): _validation = { 'type': {'required': True}, - 'server': {'required': True}, } _attribute_map = { @@ -25665,7 +26108,7 @@ class SapHanaLinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, authentication_type=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: super(SapHanaLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.server = server @@ -25719,6 +26162,11 @@ class SapHanaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object @@ -25747,14 +26195,15 @@ class SapHanaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, packet_size=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, packet_size=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.packet_size = packet_size self.partition_option = partition_option @@ -25940,6 +26389,11 @@ class SapOpenHubSource(TabularSource): resultType string), pattern: 
((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). @@ -25962,12 +26416,13 @@ class SapOpenHubSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id self.type = 'SapOpenHubSource' @@ -26297,6 +26752,11 @@ class SapTableSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). 
:type row_count: object @@ -26342,6 +26802,7 @@ class SapTableSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -26352,8 +26813,8 @@ class SapTableSource(TabularSource): 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.row_count = row_count self.row_skips = row_skips self.rfc_table_fields = rfc_table_fields @@ -27032,6 +27493,11 @@ class ServiceNowSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
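
# The additionalColumns hunk above recurs for every tabular source in this
# diff. A minimal, hypothetical usage sketch (not part of the diff): assuming
# the new AdditionalColumns model exposes `name` and `value` fields, extra
# columns can be attached to a copy source such as SapTableSource like so.
from azure.mgmt.datafactory.models import AdditionalColumns, SapTableSource

sap_source = SapTableSource(
    row_count=100,  # number of rows to retrieve, per the docstring above
    additional_columns=[
        # field names here are an assumption, not confirmed by this diff
        AdditionalColumns(name='data_source', value='SAP'),
    ],
)
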
:type query: object @@ -27048,11 +27514,12 @@ class ServiceNowSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ServiceNowSource' @@ -27162,6 +27629,10 @@ class SftpReadSettings(StoreReadSettings): :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -27181,15 +27652,17 @@ class SftpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'SftpReadSettings' @@ -27322,6 +27795,10 @@ class SftpWriteSettings(StoreWriteSettings): SFTP server. Default value: 01:00:00 (one hour). 
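
# A short sketch of the new fileListPath setting on SftpReadSettings (added in
# the hunk above): rather than wildcard matching, point the read settings at a
# text file listing the relative paths to copy. The path literal is
# illustrative only.
from azure.mgmt.datafactory.models import SftpReadSettings

read_settings = SftpReadSettings(
    recursive=False,
    file_list_path='config/files-to-copy.txt',  # relative to the dataset path
)
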
Type: string (or Expression with resultType string). :type operation_timeout: object + :param use_temp_file_rename: Upload to temporary file(s) and rename. + Disable this option if your SFTP server doesn't support rename operation. + Type: boolean (or Expression with resultType boolean). + :type use_temp_file_rename: object """ _validation = { @@ -27334,11 +27811,13 @@ class SftpWriteSettings(StoreWriteSettings): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, operation_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, operation_timeout=None, use_temp_file_rename=None, **kwargs) -> None: super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) self.operation_timeout = operation_timeout + self.use_temp_file_rename = use_temp_file_rename self.type = 'SftpWriteSettings' @@ -27501,6 +27980,11 @@ class ShopifySource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27517,15 +28001,39 @@ class ShopifySource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ShopifySource' +class SkipErrorFile(Model): + """Skip error file. + + :param file_missing: Skip if file is deleted by other client during copy. + Default is true. Type: boolean (or Expression with resultType boolean). + :type file_missing: object + :param data_inconsistency: Skip if source/sink file changed by other + concurrent write. Default is false. 
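
# Sketch for the new useTempFileRename flag on SftpWriteSettings (hunk above):
# by default uploads go to a temporary file that is renamed on completion;
# disable the behavior for SFTP servers that reject the rename operation.
from azure.mgmt.datafactory.models import SftpWriteSettings

write_settings = SftpWriteSettings(
    operation_timeout='01:00:00',  # default value, per the docstring above
    use_temp_file_rename=False,    # server does not support rename
)
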
Type: boolean (or Expression with + resultType boolean). + :type data_inconsistency: object + """ + + _attribute_map = { + 'file_missing': {'key': 'fileMissing', 'type': 'object'}, + 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, + } + + def __init__(self, *, file_missing=None, data_inconsistency=None, **kwargs) -> None: + super(SkipErrorFile, self).__init__(**kwargs) + self.file_missing = file_missing + self.data_inconsistency = data_inconsistency + + class SparkLinkedService(LinkedService): """Spark Server linked service. @@ -27740,6 +28248,11 @@ class SparkSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27756,11 +28269,12 @@ class SparkSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SparkSource' @@ -27872,6 +28386,11 @@ class SqlDWSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). 
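
# The SkipErrorFile model introduced above is a small settings bag with two
# optional flags; a minimal construction sketch:
from azure.mgmt.datafactory.models import SkipErrorFile

skip = SkipErrorFile(
    file_missing=True,        # skip files deleted mid-copy (default true)
    data_inconsistency=True,  # also skip files changed by concurrent writes
)
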
:type sql_reader_query: object @@ -27897,13 +28416,14 @@ class SqlDWSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -28016,6 +28536,11 @@ class SqlMISource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -28042,14 +28567,15 @@ class SqlMISource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -28225,6 +28751,11 @@ class SqlServerSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -28251,14 +28782,15 @@ class SqlServerSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -28500,6 +29032,11 @@ class SqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -28511,6 +29048,12 @@ class SqlSource(TabularSource): procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param isolation_level: Specifies the transaction locking behavior for the + SQL source. Allowed values: + ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The + default value is ReadCommitted. Type: string (or Expression with + resultType string). 
+ :type isolation_level: object """ _validation = { @@ -28524,16 +29067,19 @@ class SqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, isolation_level=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.isolation_level = isolation_level self.type = 'SqlSource' @@ -28708,6 +29254,11 @@ class SquareSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
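
# Sketch for the new isolationLevel property on SqlSource (hunk above); the
# query text is a placeholder. Allowed values, per the docstring, are
# ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot.
from azure.mgmt.datafactory.models import SqlSource

sql_source = SqlSource(
    sql_reader_query='SELECT * FROM dbo.Orders',
    isolation_level='ReadUncommitted',
)
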
:type query: object @@ -28724,11 +29275,12 @@ class SquareSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SquareSource' @@ -29617,6 +30169,11 @@ class SybaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -29633,11 +30190,12 @@ class SybaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SybaseSource' @@ -29829,6 +30387,11 @@ class TeradataSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. 
Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). :type query: object @@ -29854,13 +30417,14 @@ class TeradataSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.partition_option = partition_option self.partition_settings = partition_settings @@ -30053,6 +30617,28 @@ def __init__(self, *, reference_trigger, **kwargs) -> None: self.type = 'TriggerDependencyReference' +class TriggerFilterParameters(Model): + """Query parameters for triggers. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger + to get the child rerun triggers + :type parent_trigger_name: str + """ + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, + } + + def __init__(self, *, continuation_token: str=None, parent_trigger_name: str=None, **kwargs) -> None: + super(TriggerFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.parent_trigger_name = parent_trigger_name + + class TriggerPipelineReference(Model): """Pipeline that needs to be triggered with the given parameters. @@ -30073,6 +30659,33 @@ def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> Non self.parameters = parameters +class TriggerQueryResponse(Model): + """A query of triggers. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. 
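
# TriggerFilterParameters, added above, carries the filter for trigger
# queries; a minimal sketch, with a hypothetical parent trigger name:
from azure.mgmt.datafactory.models import TriggerFilterParameters

filter_params = TriggerFilterParameters(
    parent_trigger_name='DailyWindowTrigger',  # list its child rerun triggers
    continuation_token=None,                   # null requests the first page
)
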
+ :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(TriggerQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token + + class TriggerReference(Model): """Trigger reference type. @@ -30770,6 +31383,11 @@ class VerticaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -30786,11 +31404,12 @@ class VerticaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'VerticaSource' @@ -31345,6 +31964,11 @@ class WebSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -31357,10 +31981,12 @@ class WebSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, additional_columns=None, **kwargs) -> None: super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.additional_columns = additional_columns self.type = 'WebSource' @@ -31596,6 +32222,11 @@ class XeroSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31612,11 +32243,12 @@ class XeroSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'XeroSource' @@ -31779,6 +32411,11 @@ class ZohoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -31795,10 +32432,11 @@ class ZohoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py index f78455cfdb9a..9a46a2afb4ca 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py @@ -103,19 +103,6 @@ class TriggerResourcePaged(Paged): def __init__(self, *args, **kwargs): super(TriggerResourcePaged, self).__init__(*args, **kwargs) -class RerunTriggerResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`RerunTriggerResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'} - } - - def __init__(self, *args, **kwargs): - - super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs) class DataFlowResourcePaged(Paged): """ A paging container for iterating over a list of :class:`DataFlowResource ` object diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index 619150f2d6a8..59e9feaff462 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -22,7 +22,6 @@ from ._activity_runs_operations import ActivityRunsOperations from ._triggers_operations import TriggersOperations from ._trigger_runs_operations import TriggerRunsOperations -from ._rerun_triggers_operations import RerunTriggersOperations from ._data_flows_operations import DataFlowsOperations from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations @@ -40,7 +39,6 @@ 'ActivityRunsOperations', 'TriggersOperations', 'TriggerRunsOperations', - 'RerunTriggersOperations', 'DataFlowsOperations', 'DataFlowDebugSessionOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py index 00201749beee..1f24fba6ac9c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py @@ -314,7 +314,7 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} def create_run( - self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, parameters=None, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, start_from_failure=None, parameters=None, custom_headers=None, raw=False, **operation_config): """Creates a run of a pipeline. :param resource_group_name: The resource group name. @@ -334,6 +334,10 @@ def create_run( :param start_activity_name: In recovery mode, the rerun will start from this activity. If not specified, all activities will run. :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun + will start from failed activities. The property will be used only if + startActivityName is not specified. + :type start_from_failure: bool :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. :type parameters: dict[str, object] @@ -366,6 +370,8 @@ def create_run( query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') if start_activity_name is not None: query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') + if start_from_failure is not None: + query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') # Construct headers header_parameters = {} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py deleted file mode 100644 index 6d5f8e9831de..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py +++ /dev/null @@ -1,453 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -import uuid -from msrest.pipeline import ClientRawResponse -from msrestazure.azure_exceptions import CloudError -from msrest.polling import LROPoller, NoPolling -from msrestazure.polling.arm_polling import ARMPolling - -from .. import models - - -class RerunTriggersOperations(object): - """RerunTriggersOperations operations. - - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - - :param client: Client for service requests. 
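
# Usage sketch for the new startFromFailure rerun mode on create_run (hunk
# above). Credentials, names, and IDs are placeholders; per the docstring,
# startFromFailure is only honored when startActivityName is not specified.
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient

credentials = ServicePrincipalCredentials(
    client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
client = DataFactoryManagementClient(credentials, '<subscription-id>')

run_response = client.pipelines.create_run(
    'my-resource-group', 'my-factory', 'my-pipeline',
    reference_pipeline_run_id='<previous-run-id>',
    is_recovery=True,
    start_from_failure=True,  # rerun only the activities that failed
)
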
- :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - :ivar api_version: The API version. Constant value: "2018-06-01". - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self.api_version = "2018-06-01" - - self.config = config - - def create( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, rerun_tumbling_window_trigger_action_parameters, custom_headers=None, raw=False, **operation_config): - """Creates a rerun trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. - :type rerun_trigger_name: str - :param rerun_tumbling_window_trigger_action_parameters: Rerun tumbling - window trigger action parameters. - :type rerun_tumbling_window_trigger_action_parameters: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTriggerActionParameters - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: TriggerResource or ClientRawResponse if raw=true - :rtype: ~azure.mgmt.datafactory.models.TriggerResource or - ~msrest.pipeline.ClientRawResponse - :raises: :class:`CloudError` - """ - # Construct URL - url = self.create.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct body - body_content = self._serialize.body(rerun_tumbling_window_trigger_action_parameters, 'RerunTumblingWindowTriggerActionParameters') - - # Construct and send request - request = self._client.put(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, 
**operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}'} - - - def _start_initial( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): - # Construct URL - url = self.start.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - def start( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. 
-        :type rerun_trigger_name: str
-        :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
-        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
-        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
-        """
-        raw_result = self._start_initial(
-            resource_group_name=resource_group_name,
-            factory_name=factory_name,
-            trigger_name=trigger_name,
-            rerun_trigger_name=rerun_trigger_name,
-            custom_headers=custom_headers,
-            raw=True,
-            **operation_config
-        )
-
-        def get_long_running_output(response):
-            if raw:
-                client_raw_response = ClientRawResponse(None, response)
-                return client_raw_response
-
-        lro_delay = operation_config.get(
-            'long_running_operation_timeout',
-            self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-    start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/start'}
-
-
-    def _stop_initial(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config):
-        # Construct URL
-        url = self.stop.metadata['url']
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
-            'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$')
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}
-        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}
-        if self.config.generate_client_request_id:
-            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
-        if custom_headers:
-            header_parameters.update(custom_headers)
-        if self.config.accept_language is not None:
-            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
-
-        # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
-
-        if response.status_code not in [200]:
-            exp = CloudError(response)
-            exp.request_id = response.headers.get('x-ms-request-id')
-            raise exp
-
-        if raw:
-            client_raw_response = ClientRawResponse(None, response)
-            return client_raw_response
-
-    def stop(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
-        """Stops a trigger.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param trigger_name: The trigger name.
-        :type trigger_name: str
-        :param rerun_trigger_name: The rerun trigger name.
-        :type rerun_trigger_name: str
-        :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
-        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
-        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
-        """
-        raw_result = self._stop_initial(
-            resource_group_name=resource_group_name,
-            factory_name=factory_name,
-            trigger_name=trigger_name,
-            rerun_trigger_name=rerun_trigger_name,
-            custom_headers=custom_headers,
-            raw=True,
-            **operation_config
-        )
-
-        def get_long_running_output(response):
-            if raw:
-                client_raw_response = ClientRawResponse(None, response)
-                return client_raw_response
-
-        lro_delay = operation_config.get(
-            'long_running_operation_timeout',
-            self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-    stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/stop'}
-
-
-    def _cancel_initial(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config):
-        # Construct URL
-        url = self.cancel.metadata['url']
-        path_format_arguments = {
-            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
-            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-            'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'),
-            'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$')
-        }
-        url = self._client.format_url(url, **path_format_arguments)
-
-        # Construct parameters
-        query_parameters = {}
-        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
-
-        # Construct headers
-        header_parameters = {}
-        if self.config.generate_client_request_id:
-            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
-        if custom_headers:
-            header_parameters.update(custom_headers)
-        if self.config.accept_language is not None:
-            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
-
-        # Construct and send request
-        request = self._client.post(url, query_parameters, header_parameters)
-        response = self._client.send(request, stream=False, **operation_config)
-
-        if response.status_code not in [200]:
-            exp = CloudError(response)
-            exp.request_id = response.headers.get('x-ms-request-id')
-            raise exp
-
-        if raw:
-            client_raw_response = ClientRawResponse(None, response)
-            return client_raw_response
-
-    def cancel(
-            self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config):
-        """Cancels a trigger.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param trigger_name: The trigger name.
-        :type trigger_name: str
-        :param rerun_trigger_name: The rerun trigger name.
-        :type rerun_trigger_name: str
-        :param dict custom_headers: headers that will be added to the request
-        :param bool raw: The poller return type is ClientRawResponse, the
-         direct response alongside the deserialized response
-        :param polling: True for ARMPolling, False for no polling, or a
-         polling object for personal polling strategy
-        :return: An instance of LROPoller that returns None or
-         ClientRawResponse if raw==True
-        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
-         ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
-        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
-        """
-        raw_result = self._cancel_initial(
-            resource_group_name=resource_group_name,
-            factory_name=factory_name,
-            trigger_name=trigger_name,
-            rerun_trigger_name=rerun_trigger_name,
-            custom_headers=custom_headers,
-            raw=True,
-            **operation_config
-        )
-
-        def get_long_running_output(response):
-            if raw:
-                client_raw_response = ClientRawResponse(None, response)
-                return client_raw_response
-
-        lro_delay = operation_config.get(
-            'long_running_operation_timeout',
-            self.config.long_running_operation_timeout)
-        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
-        elif polling is False: polling_method = NoPolling()
-        else: polling_method = polling
-        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
-    cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/cancel'}
-
-    def list_by_trigger(
-            self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config):
-        """Lists rerun triggers by an original trigger name.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param trigger_name: The trigger name.
-        :type trigger_name: str
-        :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :param operation_config: :ref:`Operation configuration
-         overrides<msrest:optionsforoperations>`.
-        :return: An iterator like instance of RerunTriggerResource
-        :rtype:
-         ~azure.mgmt.datafactory.models.RerunTriggerResourcePaged[~azure.mgmt.datafactory.models.RerunTriggerResource]
-        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
-        """
-        def prepare_request(next_link=None):
-            if not next_link:
-                # Construct URL
-                url = self.list_by_trigger.metadata['url']
-                path_format_arguments = {
-                    'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
-                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
-                    'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
-                    'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$')
-                }
-                url = self._client.format_url(url, **path_format_arguments)
-
-                # Construct parameters
-                query_parameters = {}
-                query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
-
-            else:
-                url = next_link
-                query_parameters = {}
-
-            # Construct headers
-            header_parameters = {}
-            header_parameters['Accept'] = 'application/json'
-            if self.config.generate_client_request_id:
-                header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
-            if custom_headers:
-                header_parameters.update(custom_headers)
-            if self.config.accept_language is not None:
-                header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
-
-            # Construct and send request
-            request = self._client.get(url, query_parameters, header_parameters)
-            return request
-
-        def internal_paging(next_link=None):
-            request = prepare_request(next_link)
-
-            response = self._client.send(request, stream=False, **operation_config)
-
-            if response.status_code not in [200]:
-                exp = CloudError(response)
-                exp.request_id = response.headers.get('x-ms-request-id')
-                raise exp
-
-            return response
-
-        # Deserialize response
-        header_dict = None
-        if raw:
-            header_dict = {}
-        deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict)
-
-        return deserialized
-    list_by_trigger.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers'}
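With RerunTriggersOperations removed above, child rerun triggers are instead surfaced through the new TriggersOperations.query_by_factory method added in the next file, which filters by the parent TumblingWindowTrigger name and pages via an explicit continuation token rather than a Paged iterator. The following is a minimal migration sketch, not part of this diff: the credential setup, the placeholder names, and the assumption that the new TriggerQueryResponse model exposes `value` and `continuation_token` fields are all illustrative.

    # Illustrative usage sketch; placeholder values in angle brackets.
    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient

    credentials = ServicePrincipalCredentials(
        client_id='<client-id>', secret='<secret>', tenant='<tenant-id>')
    client = DataFactoryManagementClient(credentials, '<subscription-id>')

    # Replaces the removed client.rerun_triggers.list_by_trigger(...):
    # query the factory's triggers, filtered to the children of one
    # TumblingWindowTrigger, following the continuation token until the
    # service stops returning one.
    continuation_token = None
    while True:
        page = client.triggers.query_by_factory(
            '<resource-group>', '<factory-name>',
            continuation_token=continuation_token,
            parent_trigger_name='<tumbling-window-trigger-name>')
        for trigger in page.value:  # assumed field on TriggerQueryResponse
            print(trigger.name)
        continuation_token = page.continuation_token  # assumed field
        if not continuation_token:
            break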
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
index 57e31b1bd8c9..4554f5f7f71b 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py
@@ -113,6 +113,79 @@ def internal_paging(next_link=None):
         return deserialized
     list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'}
 
+    def query_by_factory(
+            self, resource_group_name, factory_name, continuation_token=None, parent_trigger_name=None, custom_headers=None, raw=False, **operation_config):
+        """Query triggers.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param continuation_token: The continuation token for getting the next
+         page of results. Null for first page.
+        :type continuation_token: str
+        :param parent_trigger_name: The name of the parent
+         TumblingWindowTrigger to get the child rerun triggers
+        :type parent_trigger_name: str
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: returns the direct response alongside the
+         deserialized response
+        :param operation_config: :ref:`Operation configuration
+         overrides<msrest:optionsforoperations>`.
+        :return: TriggerQueryResponse or ClientRawResponse if raw=true
+        :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse or
+         ~msrest.pipeline.ClientRawResponse
+        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
+        """
+        filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token, parent_trigger_name=parent_trigger_name)
+
+        # Construct URL
+        url = self.query_by_factory.metadata['url']
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$')
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}
+        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}
+        header_parameters['Accept'] = 'application/json'
+        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
+        if self.config.generate_client_request_id:
+            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
+        if custom_headers:
+            header_parameters.update(custom_headers)
+        if self.config.accept_language is not None:
+            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
+
+        # Construct body
+        body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters')
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters, body_content)
+        response = self._client.send(request, stream=False, **operation_config)
+
+        if response.status_code not in [200]:
+            exp = CloudError(response)
+            exp.request_id = response.headers.get('x-ms-request-id')
+            raise exp
+
+        deserialized = None
+        if response.status_code == 200:
+            deserialized = self._deserialize('TriggerQueryResponse', response)
+
+        if raw:
+            client_raw_response = ClientRawResponse(deserialized, response)
+            return client_raw_response
+
+        return deserialized
+    query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'}
+
     def create_or_update(
             self, resource_group_name, factory_name, trigger_name, properties, if_match=None, custom_headers=None, raw=False, **operation_config):
         """Creates or updates a trigger.