From a3df16a2165a99c105b3fa9ae516c7c6aa28dcfd Mon Sep 17 00:00:00 2001
From: SDK Automation
Date: Mon, 9 Mar 2020 07:47:04 +0000
Subject: [PATCH] Generated from 700e3874a1554d3918adebf8c57526cf48deb625 fix prettier check

---
 .../_data_factory_management_client.py | 5 -
 .../azure/mgmt/datafactory/models/__init__.py | 64 +-
 .../_data_factory_management_client_enums.py | 90 +-
 .../azure/mgmt/datafactory/models/_models.py | 1298 +++++++++++--
 .../mgmt/datafactory/models/_models_py3.py | 1606 ++++++++++++++---
 .../mgmt/datafactory/models/_paged_models.py | 13 -
 .../mgmt/datafactory/operations/__init__.py | 2 -
 .../operations/_pipelines_operations.py | 8 +-
 .../operations/_rerun_triggers_operations.py | 453 -----
 .../operations/_triggers_operations.py | 73 +
 10 files changed, 2649 insertions(+), 963 deletions(-)
 delete mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py

diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
index ec8185523fbd..78ff241e7898 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py
@@ -26,7 +26,6 @@ from .operations import ActivityRunsOperations
 from .operations import TriggersOperations
 from .operations import TriggerRunsOperations
-from .operations import RerunTriggersOperations
 from .operations import DataFlowsOperations
 from .operations import DataFlowDebugSessionOperations
 from . import models
@@ -64,8 +63,6 @@ class DataFactoryManagementClient(SDKClient):
 :vartype triggers: azure.mgmt.datafactory.operations.TriggersOperations
 :ivar trigger_runs: TriggerRuns operations
 :vartype trigger_runs: azure.mgmt.datafactory.operations.TriggerRunsOperations
- :ivar rerun_triggers: RerunTriggers operations
- :vartype rerun_triggers: azure.mgmt.datafactory.operations.RerunTriggersOperations
 :ivar data_flows: DataFlows operations
 :vartype data_flows: azure.mgmt.datafactory.operations.DataFlowsOperations
 :ivar data_flow_debug_session: DataFlowDebugSession operations
@@ -116,8 +113,6 @@ def __init__(
 self._client, self.config, self._serialize, self._deserialize)
 self.trigger_runs = TriggerRunsOperations(
 self._client, self.config, self._serialize, self._deserialize)
- self.rerun_triggers = RerunTriggersOperations(
- self._client, self.config, self._serialize, self._deserialize)
 self.data_flows = DataFlowsOperations(
 self._client, self.config, self._serialize, self._deserialize)
 self.data_flow_debug_session = DataFlowDebugSessionOperations(
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
index 4e9756b6aad1..7cd1c2853bcc 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py
@@ -17,6 +17,7 @@
 from ._models_py3 import ActivityRun
 from ._models_py3 import ActivityRunsQueryResponse
 from ._models_py3 import AddDataFlowToDebugSessionResponse
+ from ._models_py3 import AdditionalColumns
 from ._models_py3 import AmazonMWSLinkedService
 from ._models_py3 import AmazonMWSObjectDataset
 from ._models_py3 import AmazonMWSSource
@@ -217,6 +218,7 @@ from ._models_py3 import
ExecutePipelineActivity from ._models_py3 import ExecuteSSISPackageActivity from ._models_py3 import ExecutionActivity + from ._models_py3 import ExportSettings from ._models_py3 import ExposureControlRequest from ._models_py3 import ExposureControlResponse from ._models_py3 import Expression @@ -287,6 +289,7 @@ from ._models_py3 import ImpalaLinkedService from ._models_py3 import ImpalaObjectDataset from ._models_py3 import ImpalaSource + from ._models_py3 import ImportSettings from ._models_py3 import InformixLinkedService from ._models_py3 import InformixSink from ._models_py3 import InformixSource @@ -426,9 +429,7 @@ from ._models_py3 import RedshiftUnloadSettings from ._models_py3 import RelationalSource from ._models_py3 import RelationalTableDataset - from ._models_py3 import RerunTriggerResource from ._models_py3 import RerunTumblingWindowTrigger - from ._models_py3 import RerunTumblingWindowTriggerActionParameters from ._models_py3 import Resource from ._models_py3 import ResponsysLinkedService from ._models_py3 import ResponsysObjectDataset @@ -492,6 +493,13 @@ from ._models_py3 import ShopifyLinkedService from ._models_py3 import ShopifyObjectDataset from ._models_py3 import ShopifySource + from ._models_py3 import SkipErrorFile + from ._models_py3 import SnowflakeDataset + from ._models_py3 import SnowflakeExportCopyCommand + from ._models_py3 import SnowflakeImportCopyCommand + from ._models_py3 import SnowflakeLinkedService + from ._models_py3 import SnowflakeSink + from ._models_py3 import SnowflakeSource from ._models_py3 import SparkLinkedService from ._models_py3 import SparkObjectDataset from ._models_py3 import SparkSource @@ -546,7 +554,9 @@ from ._models_py3 import Transformation from ._models_py3 import Trigger from ._models_py3 import TriggerDependencyReference + from ._models_py3 import TriggerFilterParameters from ._models_py3 import TriggerPipelineReference + from ._models_py3 import TriggerQueryResponse from ._models_py3 import TriggerReference from ._models_py3 import TriggerResource from ._models_py3 import TriggerRun @@ -589,6 +599,7 @@ from ._models import ActivityRun from ._models import ActivityRunsQueryResponse from ._models import AddDataFlowToDebugSessionResponse + from ._models import AdditionalColumns from ._models import AmazonMWSLinkedService from ._models import AmazonMWSObjectDataset from ._models import AmazonMWSSource @@ -789,6 +800,7 @@ from ._models import ExecutePipelineActivity from ._models import ExecuteSSISPackageActivity from ._models import ExecutionActivity + from ._models import ExportSettings from ._models import ExposureControlRequest from ._models import ExposureControlResponse from ._models import Expression @@ -859,6 +871,7 @@ from ._models import ImpalaLinkedService from ._models import ImpalaObjectDataset from ._models import ImpalaSource + from ._models import ImportSettings from ._models import InformixLinkedService from ._models import InformixSink from ._models import InformixSource @@ -998,9 +1011,7 @@ from ._models import RedshiftUnloadSettings from ._models import RelationalSource from ._models import RelationalTableDataset - from ._models import RerunTriggerResource from ._models import RerunTumblingWindowTrigger - from ._models import RerunTumblingWindowTriggerActionParameters from ._models import Resource from ._models import ResponsysLinkedService from ._models import ResponsysObjectDataset @@ -1064,6 +1075,13 @@ from ._models import ShopifyLinkedService from ._models import ShopifyObjectDataset from ._models import 
ShopifySource + from ._models import SkipErrorFile + from ._models import SnowflakeDataset + from ._models import SnowflakeExportCopyCommand + from ._models import SnowflakeImportCopyCommand + from ._models import SnowflakeLinkedService + from ._models import SnowflakeSink + from ._models import SnowflakeSource from ._models import SparkLinkedService from ._models import SparkObjectDataset from ._models import SparkSource @@ -1118,7 +1136,9 @@ from ._models import Transformation from ._models import Trigger from ._models import TriggerDependencyReference + from ._models import TriggerFilterParameters from ._models import TriggerPipelineReference + from ._models import TriggerQueryResponse from ._models import TriggerReference from ._models import TriggerResource from ._models import TriggerRun @@ -1161,7 +1181,6 @@ from ._paged_models import LinkedServiceResourcePaged from ._paged_models import OperationPaged from ._paged_models import PipelineResourcePaged -from ._paged_models import RerunTriggerResourcePaged from ._paged_models import TriggerResourcePaged from ._data_factory_management_client_enums import ( IntegrationRuntimeState, @@ -1177,11 +1196,6 @@ RunQueryOrder, TriggerRunStatus, DataFlowDebugCommandType, - TumblingWindowFrequency, - BlobEventTypes, - DayOfWeek, - DaysOfWeek, - RecurrenceFrequency, GoogleAdWordsAuthenticationType, SparkServerType, SparkThriftTransportProtocol, @@ -1210,6 +1224,11 @@ DynamicsAuthenticationType, OrcCompressionCodec, AvroCompressionCodec, + TumblingWindowFrequency, + BlobEventTypes, + DayOfWeek, + DaysOfWeek, + RecurrenceFrequency, DataFlowComputeType, AzureFunctionActivityMethod, WebActivityMethod, @@ -1250,6 +1269,7 @@ 'ActivityRun', 'ActivityRunsQueryResponse', 'AddDataFlowToDebugSessionResponse', + 'AdditionalColumns', 'AmazonMWSLinkedService', 'AmazonMWSObjectDataset', 'AmazonMWSSource', @@ -1450,6 +1470,7 @@ 'ExecutePipelineActivity', 'ExecuteSSISPackageActivity', 'ExecutionActivity', + 'ExportSettings', 'ExposureControlRequest', 'ExposureControlResponse', 'Expression', @@ -1520,6 +1541,7 @@ 'ImpalaLinkedService', 'ImpalaObjectDataset', 'ImpalaSource', + 'ImportSettings', 'InformixLinkedService', 'InformixSink', 'InformixSource', @@ -1659,9 +1681,7 @@ 'RedshiftUnloadSettings', 'RelationalSource', 'RelationalTableDataset', - 'RerunTriggerResource', 'RerunTumblingWindowTrigger', - 'RerunTumblingWindowTriggerActionParameters', 'Resource', 'ResponsysLinkedService', 'ResponsysObjectDataset', @@ -1725,6 +1745,13 @@ 'ShopifyLinkedService', 'ShopifyObjectDataset', 'ShopifySource', + 'SkipErrorFile', + 'SnowflakeDataset', + 'SnowflakeExportCopyCommand', + 'SnowflakeImportCopyCommand', + 'SnowflakeLinkedService', + 'SnowflakeSink', + 'SnowflakeSource', 'SparkLinkedService', 'SparkObjectDataset', 'SparkSource', @@ -1779,7 +1806,9 @@ 'Transformation', 'Trigger', 'TriggerDependencyReference', + 'TriggerFilterParameters', 'TriggerPipelineReference', + 'TriggerQueryResponse', 'TriggerReference', 'TriggerResource', 'TriggerRun', @@ -1821,7 +1850,6 @@ 'DatasetResourcePaged', 'PipelineResourcePaged', 'TriggerResourcePaged', - 'RerunTriggerResourcePaged', 'DataFlowResourcePaged', 'DataFlowDebugSessionInfoPaged', 'IntegrationRuntimeState', @@ -1837,11 +1865,6 @@ 'RunQueryOrder', 'TriggerRunStatus', 'DataFlowDebugCommandType', - 'TumblingWindowFrequency', - 'BlobEventTypes', - 'DayOfWeek', - 'DaysOfWeek', - 'RecurrenceFrequency', 'GoogleAdWordsAuthenticationType', 'SparkServerType', 'SparkThriftTransportProtocol', @@ -1870,6 +1893,11 @@ 
'DynamicsAuthenticationType', 'OrcCompressionCodec', 'AvroCompressionCodec', + 'TumblingWindowFrequency', + 'BlobEventTypes', + 'DayOfWeek', + 'DaysOfWeek', + 'RecurrenceFrequency', 'DataFlowComputeType', 'AzureFunctionActivityMethod', 'WebActivityMethod', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 95741bbf0fa1..e30a4c509021 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -131,51 +131,6 @@ class DataFlowDebugCommandType(str, Enum): execute_expression_query = "executeExpressionQuery" -class TumblingWindowFrequency(str, Enum): - - minute = "Minute" - hour = "Hour" - - -class BlobEventTypes(str, Enum): - - microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" - microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" - - -class DayOfWeek(str, Enum): - - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" - - -class DaysOfWeek(str, Enum): - - sunday = "Sunday" - monday = "Monday" - tuesday = "Tuesday" - wednesday = "Wednesday" - thursday = "Thursday" - friday = "Friday" - saturday = "Saturday" - - -class RecurrenceFrequency(str, Enum): - - not_specified = "NotSpecified" - minute = "Minute" - hour = "Hour" - day = "Day" - week = "Week" - month = "Month" - year = "Year" - - class GoogleAdWordsAuthenticationType(str, Enum): service_authentication = "ServiceAuthentication" @@ -366,6 +321,51 @@ class AvroCompressionCodec(str, Enum): bzip2 = "bzip2" +class TumblingWindowFrequency(str, Enum): + + minute = "Minute" + hour = "Hour" + + +class BlobEventTypes(str, Enum): + + microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" + microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" + + +class DayOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + +class DaysOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + + +class RecurrenceFrequency(str, Enum): + + not_specified = "NotSpecified" + minute = "Minute" + hour = "Hour" + day = "Day" + week = "Week" + month = "Month" + year = "Year" + + class DataFlowComputeType(str, Enum): general = "General" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index 853ec8158be8..60a20eb56e92 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -302,13 +302,35 @@ def __init__(self, **kwargs): self.job_version = kwargs.get('job_version', None) +class AdditionalColumns(Model): + """Specify the column name and value of additional columns. + + :param name: Additional column name. Type: string (or Expression with + resultType string). + :type name: object + :param value: Additional column value. Type: string (or Expression with + resultType string). 
+ :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AdditionalColumns, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.value = kwargs.get('value', None) + + class LinkedService(Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureFunctionLinkedService, + sub-classes are: SnowflakeLinkedService, AzureFunctionLinkedService, AzureDataExplorerLinkedService, SapTableLinkedService, GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, DynamicsAXLinkedService, ResponsysLinkedService, @@ -380,7 +402,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 
'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 
'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): @@ -492,28 +514,29 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, - OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, - ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, - VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, - XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, - ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, - PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, - MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, - HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, - GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, - DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, - AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, - AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, - RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, - SapHanaTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + sub-classes are: SnowflakeDataset, GoogleAdWordsObjectDataset, + AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset, + DynamicsAXResourceDataset, ResponsysObjectDataset, + SalesforceMarketingCloudObjectDataset, VerticaTableDataset, + NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset, + SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, + ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, + PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset, + AzureMariaDBTableDataset, MariaDBTableDataset, MagentoObjectDataset, + JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, + HiveObjectDataset, HBaseObjectDataset, GreenplumTableDataset, + GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, + CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, + AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, + WebTableDataset, SapTableResourceDataset, RestResourceDataset, + SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, + SapEccResourceDataset, SapCloudForCustomerResourceDataset, + SapBwCubeDataset, SybaseTableDataset, SalesforceServiceCloudObjectDataset, + SalesforceObjectDataset, MicrosoftAccessTableDataset, + PostgreSqlTableDataset, MySqlTableDataset, OdbcTableDataset, + InformixTableDataset, RelationalTableDataset, Db2TableDataset, + AmazonRedshiftTableDataset, AzureMySqlTableDataset, TeradataTableDataset, + OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, @@ -573,7 +596,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 
'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 
'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 
'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): @@ -653,14 +676,15 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, - Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, - WebSource, OracleSource, AzureDataExplorerSource, HdfsSource, - FileSystemSource, RestSource, SalesforceServiceCloudSource, ODataSource, - MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource, - DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource, - DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource, - OrcSource, JsonSource, DelimitedTextSource, ParquetSource, AvroSource + sub-classes are: SnowflakeSource, HttpSource, AzureBlobFSSource, + AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, + MongoDbV2Source, MongoDbSource, WebSource, OracleSource, + AzureDataExplorerSource, HdfsSource, FileSystemSource, RestSource, + SalesforceServiceCloudSource, ODataSource, MicrosoftAccessSource, + RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, + DynamicsSource, CosmosDbSqlApiSource, DocumentDbCollectionSource, + BlobSource, TabularSource, BinarySource, OrcSource, JsonSource, + DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. @@ -695,7 +719,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 
'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, **kwargs): @@ -748,6 +772,11 @@ class TabularSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -761,6 +790,7 @@ class TabularSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } _subtype_map = { @@ -770,6 +800,7 @@ class TabularSource(CopySource): def __init__(self, **kwargs): super(TabularSource, self).__init__(**kwargs) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'TabularSource' @@ -798,6 +829,11 @@ class AmazonMWSSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -814,6 +850,7 @@ class AmazonMWSSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -922,6 +959,11 @@ class AmazonRedshiftSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -944,6 +986,7 @@ class AmazonRedshiftSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } @@ -1347,6 +1390,10 @@ class AmazonS3ReadSettings(StoreReadSettings): :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -1370,6 +1417,7 @@ class AmazonS3ReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -1381,6 +1429,7 @@ def __init__(self, **kwargs): self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -1642,7 +1691,7 @@ class CopySink(Model): SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, OrcSink, @@ -1690,7 +1739,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 
'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): @@ -1784,6 +1833,11 @@ class AvroSource(CopySource): :type type: str :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -1797,11 +1851,13 @@ class AvroSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(AvroSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'AvroSource' @@ -2249,6 +2305,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool
@@ -2271,6 +2331,7 @@ class AzureBlobFSReadSettings(StoreReadSettings):
 'recursive': {'key': 'recursive', 'type': 'object'},
 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'file_list_path': {'key': 'fileListPath', 'type': 'object'},
 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
@@ -2281,6 +2342,7 @@ def __init__(self, **kwargs):
 self.recursive = kwargs.get('recursive', None)
 self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
 self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+ self.file_list_path = kwargs.get('file_list_path', None)
 self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
 self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
 self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
@@ -2638,6 +2700,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
 :param prefix: The prefix filter for the Azure Blob name. Type: string
 (or Expression with resultType string).
 :type prefix: object
+ :param file_list_path: Point to a text file that lists each file (relative
+ path to the path configured in the dataset) that you want to copy. Type:
+ string (or Expression with resultType string).
+ :type file_list_path: object
 :param enable_partition_discovery: Indicates whether to enable partition
 discovery.
 :type enable_partition_discovery: bool
@@ -2661,6 +2727,7 @@ class AzureBlobStorageReadSettings(StoreReadSettings):
 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
 'prefix': {'key': 'prefix', 'type': 'object'},
+ 'file_list_path': {'key': 'fileListPath', 'type': 'object'},
 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
@@ -2672,6 +2739,7 @@ def __init__(self, **kwargs):
 self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
 self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
 self.prefix = kwargs.get('prefix', None)
+ self.file_list_path = kwargs.get('file_list_path', None)
 self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
 self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
 self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
@@ -3142,6 +3210,11 @@ class AzureDataExplorerSource(CopySource):
 resultType string), pattern:
 ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
 :type query_timeout: object
+ :param additional_columns: Specifies the additional columns to be added to
+ source data. Type: array of objects (or Expression with resultType array
+ of objects).
+ :type additional_columns:
+ list[~azure.mgmt.datafactory.models.AdditionalColumns]
 """

 _validation = {
@@ -3158,6 +3231,7 @@ class AzureDataExplorerSource(CopySource):
 'query': {'key': 'query', 'type': 'object'},
 'no_truncation': {'key': 'noTruncation', 'type': 'object'},
 'query_timeout': {'key': 'queryTimeout', 'type': 'object'},
+ 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'},
 }

 def __init__(self, **kwargs):
@@ -3165,6 +3239,7 @@ def __init__(self, **kwargs):
 self.query = kwargs.get('query', None)
 self.no_truncation = kwargs.get('no_truncation', None)
 self.query_timeout = kwargs.get('query_timeout', None)
+ self.additional_columns = kwargs.get('additional_columns', None)
 self.type = 'AzureDataExplorerSource'
@@ -3533,6 +3608,10 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings):
 :param wildcard_file_name: ADLS wildcardFileName. Type: string (or
 Expression with resultType string).
 :type wildcard_file_name: object
+ :param file_list_path: Point to a text file that lists each file (relative
+ path to the path configured in the dataset) that you want to copy. Type:
+ string (or Expression with resultType string).
+ :type file_list_path: object
 :param enable_partition_discovery: Indicates whether to enable partition
 discovery.
 :type enable_partition_discovery: bool
@@ -3555,6 +3634,7 @@
 'recursive': {'key': 'recursive', 'type': 'object'},
 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'},
 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'},
+ 'file_list_path': {'key': 'fileListPath', 'type': 'object'},
 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'},
 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'},
 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'},
@@ -3565,6 +3645,7 @@
 self.recursive = kwargs.get('recursive', None)
 self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None)
 self.wildcard_file_name = kwargs.get('wildcard_file_name', None)
+ self.file_list_path = kwargs.get('file_list_path', None)
 self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None)
 self.modified_datetime_start = kwargs.get('modified_datetime_start', None)
 self.modified_datetime_end = kwargs.get('modified_datetime_end', None)
@@ -3690,6 +3771,11 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings):
 :type copy_behavior: object
 :param type: Required. Constant filled by server.
 :type type: str
+ :param expiry_date_time: Specifies the expiry time of the written files.
+ The time is applied to the UTC time zone in the format of
+ "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or
+ Expression with resultType string).
+ :type expiry_date_time: object """ _validation = { @@ -3701,10 +3787,12 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureDataLakeStoreWriteSettings, self).__init__(**kwargs) + self.expiry_date_time = kwargs.get('expiry_date_time', None) self.type = 'AzureDataLakeStoreWriteSettings' @@ -3828,6 +3916,10 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param wildcard_file_name: Azure File Storage wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -3850,6 +3942,7 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -3860,6 +3953,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -4194,6 +4288,11 @@ class AzureMariaDBSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -4210,6 +4309,7 @@ class AzureMariaDBSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -4834,6 +4934,11 @@ class AzureMySqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -4850,6 +4955,7 @@ class AzureMySqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5061,6 +5167,11 @@ class AzurePostgreSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5077,6 +5188,7 @@ class AzurePostgreSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -5852,6 +5964,11 @@ class AzureSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -5878,6 +5995,7 @@ class AzureSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -6189,6 +6307,11 @@ class AzureTableSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). 
:type azure_table_source_query: object @@ -6209,6 +6332,7 @@ class AzureTableSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } @@ -6458,7 +6582,7 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + sub-classes are: ChainingTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when @@ -6497,7 +6621,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'ChainingTrigger': 'ChainingTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, **kwargs): @@ -6930,6 +7054,11 @@ class CassandraSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). @@ -6957,6 +7086,7 @@ class CassandraSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } @@ -7495,6 +7625,11 @@ class CommonDataServiceForAppsSource(CopySource): Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -7508,11 +7643,13 @@ class CommonDataServiceForAppsSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(CommonDataServiceForAppsSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'CommonDataServiceForAppsSource' @@ -7712,6 +7849,11 @@ class ConcurSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -7728,6 +7870,7 @@ class ConcurSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -7790,10 +7933,19 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param log_storage_settings: Log storage settings the customer needs to + provide when enabling session log. + :type log_storage_settings: + ~azure.mgmt.datafactory.models.LogStorageSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. :type preserve: list[object] + :param validate_data_consistency: Whether to enable Data Consistency + validation. Type: boolean (or Expression with resultType boolean). + :type validate_data_consistency: object + :param skip_error_file: Specify the fault tolerance for data consistency. + :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity. 
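# --- A hedged usage sketch (illustrative; not part of the generated patch). ---
# It shows how the CopyActivity properties introduced above (logStorageSettings,
# validateDataConsistency, skipErrorFile) and the additionalColumns setting this
# patch adds to most copy sources might fit together. The keyword arguments for
# AdditionalColumns, LogStorageSettings and SkipErrorFile are assumptions
# inferred from the docstrings; those classes are defined elsewhere in the diff.
from azure.mgmt.datafactory.models import (
    AdditionalColumns, CopyActivity, DatasetReference, DelimitedTextSink,
    DelimitedTextSource, LinkedServiceReference, LogStorageSettings,
    SkipErrorFile)

copy_activity = CopyActivity(
    name='CopyWithConsistencyChecks',
    inputs=[DatasetReference(reference_name='SourceDataset')],
    outputs=[DatasetReference(reference_name='SinkDataset')],
    # additionalColumns: tag every copied row with an extra static column
    # ('name'/'value' keywords are assumed, not shown in this section).
    source=DelimitedTextSource(additional_columns=[
        AdditionalColumns(name='source_system', value='CRM')]),
    sink=DelimitedTextSink(),
    # validateDataConsistency: verify data between source and sink.
    validate_data_consistency=True,
    # skipErrorFile: tolerate inconsistent files instead of failing the run
    # ('data_inconsistency' keyword is assumed).
    skip_error_file=SkipErrorFile(data_inconsistency=True),
    # logStorageSettings: where the session log lands when logging is enabled
    # ('linked_service_name'/'path' keywords are assumed).
    log_storage_settings=LogStorageSettings(
        linked_service_name=LinkedServiceReference(reference_name='LogStore'),
        path='copy-session-logs'))
# --- End of sketch. ---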
@@ -7825,8 +7977,11 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, + 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } @@ -7842,8 +7997,11 @@ def __init__(self, **kwargs): self.data_integration_units = kwargs.get('data_integration_units', None) self.enable_skip_incompatible_row = kwargs.get('enable_skip_incompatible_row', None) self.redirect_incompatible_row_settings = kwargs.get('redirect_incompatible_row_settings', None) + self.log_storage_settings = kwargs.get('log_storage_settings', None) self.preserve_rules = kwargs.get('preserve_rules', None) self.preserve = kwargs.get('preserve', None) + self.validate_data_consistency = kwargs.get('validate_data_consistency', None) + self.skip_error_file = kwargs.get('skip_error_file', None) self.inputs = kwargs.get('inputs', None) self.outputs = kwargs.get('outputs', None) self.type = 'Copy' @@ -8126,6 +8284,11 @@ class CosmosDbMongoDbApiSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -8142,6 +8305,7 @@ class CosmosDbMongoDbApiSource(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -8150,6 +8314,7 @@ def __init__(self, **kwargs): self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'CosmosDbMongoDbApiSource' @@ -8299,6 +8464,11 @@ class CosmosDbSqlApiSource(CopySource): :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -8314,6 +8484,7 @@ class CosmosDbSqlApiSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -8321,6 +8492,7 @@ def __init__(self, **kwargs): self.query = kwargs.get('query', None) self.page_size = kwargs.get('page_size', None) self.preferred_regions = kwargs.get('preferred_regions', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'CosmosDbSqlApiSource' @@ -8407,6 +8579,11 @@ class CouchbaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -8423,6 +8600,7 @@ class CouchbaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -9925,37 +10103,47 @@ class Db2LinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: The connection string. It is mutually exclusive + with server, database, authenticationType, userName, packageCollection and + certificateCommonName property. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType + string). :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). + :param database: Database name for connection. It is mutually exclusive + with connectionString property. Type: string (or Expression with + resultType string). :type database: object :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' + It is mutually exclusive with connectionString property. Possible values + include: 'Basic' :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). + :param username: Username for authentication. It is mutually exclusive + with connectionString property. Type: string (or Expression with + resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param package_collection: Under where packages are created when querying - database. Type: string (or Expression with resultType string). + database. 
It is mutually exclusive with connectionString property. Type: + string (or Expression with resultType string). :type package_collection: object :param certificate_common_name: Certificate Common Name when TLS is - enabled. Type: string (or Expression with resultType string). + enabled. It is mutually exclusive with connectionString property. Type: + string (or Expression with resultType string). :type certificate_common_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). + credential manager. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, } _attribute_map = { @@ -9965,6 +10153,7 @@ class Db2LinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, @@ -9977,6 +10166,7 @@ class Db2LinkedService(LinkedService): def __init__(self, **kwargs): super(Db2LinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) self.server = kwargs.get('server', None) self.database = kwargs.get('database', None) self.authentication_type = kwargs.get('authentication_type', None) @@ -10013,6 +10203,11 @@ class Db2Source(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -10029,6 +10224,7 @@ class Db2Source(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -10459,6 +10655,11 @@ class DelimitedTextSource(CopySource): :param format_settings: DelimitedText format settings. :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -10473,12 +10674,14 @@ class DelimitedTextSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(DelimitedTextSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) self.format_settings = kwargs.get('format_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'DelimitedTextSource' @@ -10739,6 +10942,11 @@ class DocumentDbCollectionSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -10754,6 +10962,7 @@ class DocumentDbCollectionSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -10761,6 +10970,7 @@ def __init__(self, **kwargs): self.query = kwargs.get('query', None) self.nesting_separator = kwargs.get('nesting_separator', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'DocumentDbCollectionSource' @@ -10846,6 +11056,11 @@ class DrillSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -10862,6 +11077,7 @@ class DrillSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -11157,6 +11373,11 @@ class DynamicsAXSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -11173,6 +11394,7 @@ class DynamicsAXSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -11463,6 +11685,11 @@ class DynamicsCrmSource(CopySource): Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -11476,11 +11703,13 @@ class DynamicsCrmSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(DynamicsCrmSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'DynamicsCrmSource' @@ -11762,6 +11991,11 @@ class DynamicsSource(CopySource): Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -11775,11 +12009,13 @@ class DynamicsSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(DynamicsSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'DynamicsSource' @@ -11947,6 +12183,11 @@ class EloquaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -11963,6 +12204,7 @@ class EloquaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -12280,6 +12522,40 @@ def __init__(self, **kwargs): self.type = 'ExecuteSSISPackage' +class ExportSettings(Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. 
Known + sub-classes are: SnowflakeExportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__(self, **kwargs): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + class ExposureControlRequest(Model): """The exposure control request. @@ -12821,6 +13097,10 @@ class FileServerReadSettings(StoreReadSettings): :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -12843,6 +13123,7 @@ class FileServerReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -12853,6 +13134,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -13060,6 +13342,11 @@ class FileSystemSource(CopySource): recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -13073,11 +13360,13 @@ class FileSystemSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(FileSystemSource, self).__init__(**kwargs) self.recursive = kwargs.get('recursive', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'FileSystemSource' @@ -13215,6 +13504,10 @@ class FtpReadSettings(StoreReadSettings): :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. :type use_binary_transfer: bool @@ -13231,6 +13524,7 @@ class FtpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } @@ -13239,6 +13533,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.use_binary_transfer = kwargs.get('use_binary_transfer', None) self.type = 'FtpReadSettings' @@ -13695,6 +13990,11 @@ class GoogleAdWordsSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13711,6 +14011,7 @@ class GoogleAdWordsSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -13927,6 +14228,11 @@ class GoogleBigQuerySource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -13943,6 +14249,7 @@ class GoogleBigQuerySource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14089,6 +14396,10 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -14112,6 +14423,7 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -14123,6 +14435,7 @@ def __init__(self, **kwargs): self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -14211,6 +14524,11 @@ class GreenplumSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14227,6 +14545,7 @@ class GreenplumSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14493,6 +14812,11 @@ class HBaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -14509,6 +14833,7 @@ class HBaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -14645,6 +14970,10 @@ class HdfsReadSettings(StoreReadSettings): :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -14669,6 +14998,7 @@ class HdfsReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, @@ -14680,6 +15010,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) @@ -15721,6 +16052,11 @@ class HiveSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -15737,6 +16073,7 @@ class HiveSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16234,6 +16571,11 @@ class HubspotSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -16250,6 +16592,7 @@ class HubspotSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16519,6 +16862,11 @@ class ImpalaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16535,6 +16883,7 @@ class ImpalaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -16544,6 +16893,40 @@ def __init__(self, **kwargs): self.type = 'ImpalaSource' + +class ImportSettings(Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeImportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__(self, **kwargs): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + class InformixLinkedService(LinkedService): """Informix linked service. @@ -16697,6 +17080,11 @@ class InformixSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -16713,6 +17101,7 @@ class InformixSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17676,6 +18065,11 @@ class JiraSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17692,6 +18086,7 @@ class JiraSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -17924,6 +18319,11 @@ class JsonSource(CopySource): :type type: str :param store_settings: Json store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -17937,11 +18337,13 @@ class JsonSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(JsonSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'JsonSource' @@ -18490,6 +18892,11 @@ class MagentoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18506,6 +18913,7 @@ class MagentoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -18901,6 +19309,11 @@ class MariaDBSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -18917,6 +19330,7 @@ class MariaDBSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19149,6 +19563,11 @@ class MarketoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -19165,6 +19584,7 @@ class MarketoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -19326,6 +19746,11 @@ class MicrosoftAccessSource(CopySource): :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19339,11 +19764,13 @@ class MicrosoftAccessSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(MicrosoftAccessSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'MicrosoftAccessSource' @@ -19630,6 +20057,11 @@ class MongoDbSource(CopySource): :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19643,11 +20075,13 @@ class MongoDbSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(MongoDbSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'MongoDbSource' @@ -19804,6 +20238,11 @@ class MongoDbV2Source(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19820,6 +20259,7 @@ class MongoDbV2Source(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -19828,6 +20268,7 @@ def __init__(self, **kwargs): self.cursor_methods = kwargs.get('cursor_methods', None) self.batch_size = kwargs.get('batch_size', None) self.query_timeout = kwargs.get('query_timeout', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'MongoDbV2Source' @@ -19914,6 +20355,11 @@ class MySqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -19930,6 +20376,7 @@ class MySqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -20111,6 +20558,11 @@ class NetezzaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -20136,6 +20588,7 @@ class NetezzaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, @@ -20418,6 +20871,11 @@ class ODataSource(CopySource): :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -20431,11 +20889,13 @@ class ODataSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(ODataSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'ODataSource' @@ -20592,6 +21052,11 @@ class OdbcSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -20608,6 +21073,7 @@ class OdbcSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21355,6 +21821,11 @@ class OracleServiceCloudSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -21371,6 +21842,7 @@ class OracleServiceCloudSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -21471,6 +21943,11 @@ class OracleSource(CopySource): source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21487,6 +21964,7 @@ class OracleSource(CopySource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -21495,6 +21973,7 @@ def __init__(self, **kwargs): self.query_timeout = kwargs.get('query_timeout', None) self.partition_option = kwargs.get('partition_option', None) self.partition_settings = kwargs.get('partition_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'OracleSource' @@ -21744,6 +22223,11 @@ class OrcSource(CopySource): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21757,11 +22241,13 @@ class OrcSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(OrcSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'OrcSource' @@ -21966,6 +22452,11 @@ class ParquetSource(CopySource): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21979,11 +22470,13 @@ class ParquetSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(ParquetSource, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'ParquetSource' @@ -22152,6 +22645,11 @@ class PaypalSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -22168,6 +22666,7 @@ class PaypalSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22381,6 +22880,11 @@ class PhoenixSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22397,6 +22901,7 @@ class PhoenixSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -22811,6 +23316,11 @@ class PostgreSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -22827,6 +23337,7 @@ class PostgreSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23121,6 +23632,11 @@ class PrestoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23137,6 +23653,7 @@ class PrestoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23319,6 +23836,11 @@ class QuickBooksSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23335,6 +23857,7 @@ class QuickBooksSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -23503,6 +24026,11 @@ class RelationalSource(CopySource): :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -23516,11 +24044,13 @@ class RelationalSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(RelationalSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'RelationalSource' @@ -23584,48 +24114,6 @@ def __init__(self, **kwargs): self.type = 'RelationalTable' -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, **kwargs): - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -23650,7 +24138,7 @@ class RerunTumblingWindowTrigger(Trigger): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param parent_trigger: The parent trigger reference. + :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. 
@@ -23658,17 +24146,18 @@ class RerunTumblingWindowTrigger(Trigger): :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. :type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int + :param rerun_concurrency: Required. The max number of parallel time + windows (ready for execution) for which a rerun is triggered. + :type rerun_concurrency: int """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, + 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { @@ -23680,7 +24169,7 @@ class RerunTumblingWindowTrigger(Trigger): 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } def __init__(self, **kwargs): @@ -23688,45 +24177,10 @@ def __init__(self, **kwargs): self.parent_trigger = kwargs.get('parent_trigger', None) self.requested_start_time = kwargs.get('requested_start_time', None) self.requested_end_time = kwargs.get('requested_end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) + self.rerun_concurrency = kwargs.get('rerun_concurrency', None) self.type = 'RerunTumblingWindowTrigger' -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, **kwargs): - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.end_time = kwargs.get('end_time', None) - self.max_concurrency = kwargs.get('max_concurrency', None) - - class ResponsysLinkedService(LinkedService): """Responsys linked service. @@ -23894,6 +24348,11 @@ class ResponsysSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. 
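This hunk makes parentTrigger required and renames maxConcurrency to rerunConcurrency (still bounded 1-50). A construction sketch for the reworked trigger; the parent trigger reference is passed as a plain object here and its name is hypothetical:

```python
from datetime import datetime

from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

rerun = RerunTumblingWindowTrigger(
    parent_trigger={'referenceName': 'DailyWindowTrigger',
                    'type': 'TriggerReference'},
    requested_start_time=datetime(2020, 3, 1),  # UTC only, per the docstring
    requested_end_time=datetime(2020, 3, 8),
    rerun_concurrency=10,  # max parallel windows eligible for rerun
)
```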
Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23910,6 +24369,7 @@ class ResponsysSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24138,6 +24598,11 @@ class RestSource(CopySource): :param request_interval: The time to await before sending next page request. :type request_interval: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -24156,6 +24621,7 @@ class RestSource(CopySource): 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): @@ -24166,6 +24632,7 @@ def __init__(self, **kwargs): self.pagination_rules = kwargs.get('pagination_rules', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'RestSource' @@ -24345,9 +24812,12 @@ class SalesforceLinkedService(LinkedService): :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string + (or Expression with resultType string). + :type api_version: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -24369,6 +24839,7 @@ class SalesforceLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -24378,6 +24849,7 @@ def __init__(self, **kwargs): self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.security_token = kwargs.get('security_token', None) + self.api_version = kwargs.get('api_version', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'Salesforce' @@ -24546,6 +25018,11 @@ class SalesforceMarketingCloudSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -24562,6 +25039,7 @@ class SalesforceMarketingCloudSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -24664,9 +25142,12 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string + (or Expression with resultType string). + :type api_version: object :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). 
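A sketch of the new apiVersion type property on the Salesforce linked service; SecureString is assumed here as the SecretBase implementation, and all values are placeholders:

```python
from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

sf = SalesforceLinkedService(
    username='adf-user@example.com',
    password=SecureString(value='<password>'),
    security_token=SecureString(value='<token>'),  # now documented as optional
    api_version='47.0',  # pin the Salesforce API version used by ADF
)
```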
:type extended_properties: object @@ -24691,6 +25172,7 @@ class SalesforceServiceCloudLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } @@ -24701,6 +25183,7 @@ def __init__(self, **kwargs): self.username = kwargs.get('username', None) self.password = kwargs.get('password', None) self.security_token = kwargs.get('security_token', None) + self.api_version = kwargs.get('api_version', None) self.extended_properties = kwargs.get('extended_properties', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'SalesforceServiceCloud' @@ -24866,6 +25349,11 @@ class SalesforceServiceCloudSource(CopySource): Query. Possible values include: 'Query', 'QueryAll' :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -24880,12 +25368,14 @@ class SalesforceServiceCloudSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(SalesforceServiceCloudSource, self).__init__(**kwargs) self.query = kwargs.get('query', None) self.read_behavior = kwargs.get('read_behavior', None) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'SalesforceServiceCloudSource' @@ -24986,6 +25476,11 @@ class SalesforceSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -25006,6 +25501,7 @@ class SalesforceSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } @@ -25173,6 +25669,11 @@ class SapBwSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: MDX query. 
Type: string (or Expression with resultType string). :type query: object @@ -25189,6 +25690,7 @@ class SapBwSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25403,6 +25905,11 @@ class SapCloudForCustomerSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -25419,6 +25926,7 @@ class SapCloudForCustomerSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25578,6 +26086,11 @@ class SapEccSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -25594,6 +26107,7 @@ class SapEccSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -25719,6 +26233,11 @@ class SapHanaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). :type query: object @@ -25747,6 +26266,7 @@ class SapHanaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, @@ -25940,6 +26460,11 @@ class SapOpenHubSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). @@ -25962,6 +26487,7 @@ class SapOpenHubSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } @@ -26297,6 +26823,11 @@ class SapTableSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param row_count: The number of rows to be retrieved. Type: integer(or Expression with resultType integer). :type row_count: object @@ -26342,6 +26873,7 @@ class SapTableSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -27032,6 +27564,11 @@ class ServiceNowSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27048,6 +27585,7 @@ class ServiceNowSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27162,6 +27700,10 @@ class SftpReadSettings(StoreReadSettings): :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). 
:type modified_datetime_start: object @@ -27181,6 +27723,7 @@ class SftpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } @@ -27190,6 +27733,7 @@ def __init__(self, **kwargs): self.recursive = kwargs.get('recursive', None) self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.file_list_path = kwargs.get('file_list_path', None) self.modified_datetime_start = kwargs.get('modified_datetime_start', None) self.modified_datetime_end = kwargs.get('modified_datetime_end', None) self.type = 'SftpReadSettings' @@ -27322,6 +27866,10 @@ class SftpWriteSettings(StoreWriteSettings): SFTP server. Default value: 01:00:00 (one hour). Type: string (or Expression with resultType string). :type operation_timeout: object + :param use_temp_file_rename: Upload to temporary file(s) and rename. + Disable this option if your SFTP server doesn't support rename operation. + Type: boolean (or Expression with resultType boolean). + :type use_temp_file_rename: object """ _validation = { @@ -27334,11 +27882,13 @@ class SftpWriteSettings(StoreWriteSettings): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, } def __init__(self, **kwargs): super(SftpWriteSettings, self).__init__(**kwargs) self.operation_timeout = kwargs.get('operation_timeout', None) + self.use_temp_file_rename = kwargs.get('use_temp_file_rename', None) self.type = 'SftpWriteSettings' @@ -27501,6 +28051,11 @@ class ShopifySource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27517,6 +28072,7 @@ class ShopifySource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27526,6 +28082,346 @@ def __init__(self, **kwargs): self.type = 'ShopifySource' +class SkipErrorFile(Model): + """Skip error file. + + :param file_missing: Skip if file is deleted by other client during copy. + Default is true. Type: boolean (or Expression with resultType boolean). + :type file_missing: object + :param data_inconsistency: Skip if source/sink file changed by other + concurrent write. Default is false. Type: boolean (or Expression with + resultType boolean). 
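The SFTP settings gain two knobs in this change: fileListPath on the read side and useTempFileRename on the write side. A combined sketch with hypothetical values:

```python
from azure.mgmt.datafactory.models import SftpReadSettings, SftpWriteSettings

read = SftpReadSettings(
    recursive=False,
    # Text file listing the relative paths (to the dataset path) to copy.
    file_list_path='control/files-to-copy.txt',
)
write = SftpWriteSettings(
    operation_timeout='01:00:00',
    # Write directly instead of upload-then-rename, for SFTP servers
    # that do not support the rename operation.
    use_temp_file_rename=False,
)
```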
+ :type data_inconsistency: object + """ + + _attribute_map = { + 'file_missing': {'key': 'fileMissing', 'type': 'object'}, + 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SkipErrorFile, self).__init__(**kwargs) + self.file_missing = kwargs.get('file_missing', None) + self.data_inconsistency = kwargs.get('data_inconsistency', None) + + +class SnowflakeDataset(Dataset): + """The snowflake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param snowflake_dataset_schema: The schema name of the Snowflake + database. Type: string (or Expression with resultType string). + :type snowflake_dataset_schema: object + :param table: The table name of the Snowflake database. Type: string (or + Expression with resultType string). + :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'snowflake_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SnowflakeDataset, self).__init__(**kwargs) + self.snowflake_dataset_schema = kwargs.get('snowflake_dataset_schema', None) + self.table = kwargs.get('table', None) + self.type = 'SnowflakeTable' + + +class SnowflakeExportCopyCommand(ExportSettings): + """Snowflake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. 
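A construction sketch for the new Snowflake dataset; note the Python-side name snowflake_dataset_schema, which serializes to typeProperties.schema to avoid clashing with the base Dataset schema property. The linked service reference name is hypothetical:

```python
from azure.mgmt.datafactory.models import (
    LinkedServiceReference,
    SnowflakeDataset,
)

ds = SnowflakeDataset(
    linked_service_name=LinkedServiceReference(reference_name='SnowflakeLS'),
    snowflake_dataset_schema='PUBLIC',  # serialized as typeProperties.schema
    table='CUSTOMERS',                  # serialized as typeProperties.table
)
```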
+ :type type: str + :param additional_copy_options: Additional copy options directly passed to + snowflake Copy Command. Type: key value pairs (value should be string + type) (or Expression with resultType object). Example: + "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" } + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly + passed to snowflake Copy Command. Type: key value pairs (value should be + string type) (or Expression with resultType object). Example: + "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": + "'FALSE'" } + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(SnowflakeExportCopyCommand, self).__init__(**kwargs) + self.additional_copy_options = kwargs.get('additional_copy_options', None) + self.additional_format_options = kwargs.get('additional_format_options', None) + self.type = 'SnowflakeExportCopyCommand' + + +class SnowflakeImportCopyCommand(ImportSettings): + """Snowflake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to + snowflake Copy Command. Type: key value pairs (value should be string + type) (or Expression with resultType object). Example: + "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" } + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly + passed to snowflake Copy Command. Type: key value pairs (value should be + string type) (or Expression with resultType object). Example: + "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": + "'FALSE'" } + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__(self, **kwargs): + super(SnowflakeImportCopyCommand, self).__init__(**kwargs) + self.additional_copy_options = kwargs.get('additional_copy_options', None) + self.additional_format_options = kwargs.get('additional_format_options', None) + self.type = 'SnowflakeImportCopyCommand' + + +class SnowflakeLinkedService(LinkedService): + """Snowflake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
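Both copy-command settings objects pass their options straight through to Snowflake's COPY command as key/value pairs. A sketch reusing the option pairs from the docstrings above:

```python
from azure.mgmt.datafactory.models import (
    SnowflakeExportCopyCommand,
    SnowflakeImportCopyCommand,
)

export = SnowflakeExportCopyCommand(
    additional_copy_options={'DATE_FORMAT': 'MM/DD/YYYY',
                             'TIME_FORMAT': "'HH24:MI:SS.FF'"},
    additional_format_options={'OVERWRITE': 'TRUE'},
)
import_ = SnowflakeImportCopyCommand(
    additional_format_options={'FORCE': 'TRUE',
                               'LOAD_UNCERTAIN_FILES': "'FALSE'"},
)
```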
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string of snowflake. + Type: string, SecureString. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(SnowflakeLinkedService, self).__init__(**kwargs) + self.connection_string = kwargs.get('connection_string', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'Snowflake' + + +class SnowflakeSink(CopySink): + """A copy activity snowflake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). 
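A sketch for the new Snowflake linked service; connectionString is required, and the password can be resolved from Key Vault via the documented AzureKeyVaultSecretReference. The account, linked service and secret names are placeholders:

```python
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference,
    LinkedServiceReference,
    SnowflakeLinkedService,
)

snowflake = SnowflakeLinkedService(
    connection_string=(
        'jdbc:snowflake://myaccount.snowflakecomputing.com/'
        '?user=ADF_USER&db=MYDB&warehouse=COMPUTE_WH'
    ),
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='AkvLinkedService'),
        secret_name='snowflake-password',
    ),
)
```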
+ :type pre_copy_script: object + :param import_settings: Snowflake import settings. + :type import_settings: + ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, + } + + def __init__(self, **kwargs): + super(SnowflakeSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + self.type = 'SnowflakeSink' + + +class SnowflakeSource(CopySource): + """A copy activity snowflake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Snowflake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Snowflake export settings. + :type export_settings: + ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, + } + + def __init__(self, **kwargs): + super(SnowflakeSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + self.type = 'SnowflakeSource' + + class SparkLinkedService(LinkedService): """Spark Server linked service. @@ -27740,6 +28636,11 @@ class SparkSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
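The new source and sink delegate staging behaviour to the export and import settings defined above. A sketch of the two halves of a Snowflake copy; the queries and table names are illustrative:

```python
from azure.mgmt.datafactory.models import (
    SnowflakeExportCopyCommand,
    SnowflakeImportCopyCommand,
    SnowflakeSink,
    SnowflakeSource,
)

source = SnowflakeSource(
    query='SELECT * FROM PUBLIC.CUSTOMERS',
    export_settings=SnowflakeExportCopyCommand(),
)
sink = SnowflakeSink(
    pre_copy_script='TRUNCATE TABLE PUBLIC.CUSTOMERS_STAGE',
    import_settings=SnowflakeImportCopyCommand(),
)
```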
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27756,6 +28657,7 @@ class SparkSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -27872,6 +28774,11 @@ class SqlDWSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -27897,6 +28804,7 @@ class SqlDWSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, @@ -28016,6 +28924,11 @@ class SqlMISource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -28042,6 +28955,7 @@ class SqlMISource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -28225,6 +29139,11 @@ class SqlServerSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -28251,6 +29170,7 @@ class SqlServerSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, @@ -28500,6 +29420,11 @@ class SqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -28511,6 +29436,12 @@ class SqlSource(TabularSource): procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param isolation_level: Specifies the transaction locking behavior for the + SQL source. Allowed values: + ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The + default value is ReadCommitted. Type: string (or Expression with + resultType string). + :type isolation_level: object """ _validation = { @@ -28524,9 +29455,11 @@ class SqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, } def __init__(self, **kwargs): @@ -28534,6 +29467,7 @@ def __init__(self, **kwargs): self.sql_reader_query = kwargs.get('sql_reader_query', None) self.sql_reader_stored_procedure_name = kwargs.get('sql_reader_stored_procedure_name', None) self.stored_procedure_parameters = kwargs.get('stored_procedure_parameters', None) + self.isolation_level = kwargs.get('isolation_level', None) self.type = 'SqlSource' @@ -28708,6 +29642,11 @@ class SquareSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
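A sketch of the new isolationLevel property on SqlSource; per the docstring above it accepts ReadCommitted, ReadUncommitted, RepeatableRead, Serializable or Snapshot and defaults to ReadCommitted:

```python
from azure.mgmt.datafactory.models import SqlSource

source = SqlSource(
    sql_reader_query='SELECT * FROM dbo.Orders',
    # Trade read consistency for throughput: take no shared locks.
    isolation_level='ReadUncommitted',
)
```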
:type query: object @@ -28724,6 +29663,7 @@ class SquareSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29617,6 +30557,11 @@ class SybaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -29633,6 +30578,7 @@ class SybaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -29829,6 +30775,11 @@ class TeradataSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). :type query: object @@ -29854,6 +30805,7 @@ class TeradataSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, @@ -30053,6 +31005,28 @@ def __init__(self, **kwargs): self.type = 'TriggerDependencyReference' +class TriggerFilterParameters(Model): + """Query parameters for triggers. + + :param continuation_token: The continuation token for getting the next + page of results. Null for first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger + to get the child rerun triggers + :type parent_trigger_name: str + """ + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerFilterParameters, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.parent_trigger_name = kwargs.get('parent_trigger_name', None) + + class TriggerPipelineReference(Model): """Pipeline that needs to be triggered with the given parameters. @@ -30073,6 +31047,33 @@ def __init__(self, **kwargs): self.parameters = kwargs.get('parameters', None) +class TriggerQueryResponse(Model): + """A query of triggers. 
+ + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise. + :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(TriggerQueryResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.continuation_token = kwargs.get('continuation_token', None) + + class TriggerReference(Model): """Trigger reference type. @@ -30770,6 +31771,11 @@ class VerticaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -30786,6 +31792,7 @@ class VerticaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31345,6 +32352,11 @@ class WebSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -31357,10 +32369,12 @@ class WebSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } def __init__(self, **kwargs): super(WebSource, self).__init__(**kwargs) + self.additional_columns = kwargs.get('additional_columns', None) self.type = 'WebSource' @@ -31596,6 +32610,11 @@ class XeroSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
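With the standalone rerun-trigger operations removed, child rerun triggers are now discovered by filtering the trigger query on parentTriggerName. A paging sketch; the client variable and the query_by_factory operation name are assumptions, since the operations layer is outside this hunk:

```python
from azure.mgmt.datafactory.models import TriggerFilterParameters

# client is assumed to be an authenticated DataFactoryManagementClient.
filters = TriggerFilterParameters(parent_trigger_name='DailyWindowTrigger')
while True:
    page = client.triggers.query_by_factory(
        'my-resource-group', 'my-factory', filters)
    for trigger in page.value:       # TriggerQueryResponse.value
        print(trigger.name)
    if not page.continuation_token:  # null once the last page is reached
        break
    filters.continuation_token = page.continuation_token
```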
:type query: object @@ -31612,6 +32631,7 @@ class XeroSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } @@ -31779,6 +32799,11 @@ class ZohoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31795,6 +32820,7 @@ class ZohoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 8c7c24c27e52..e3e162534a1f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -302,13 +302,35 @@ def __init__(self, *, job_version: str=None, **kwargs) -> None: self.job_version = job_version +class AdditionalColumns(Model): + """Specify the column name and value of additional columns. + + :param name: Additional column name. Type: string (or Expression with + resultType string). + :type name: object + :param value: Additional column value. Type: string (or Expression with + resultType string). + :type value: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'value': {'key': 'value', 'type': 'object'}, + } + + def __init__(self, *, name=None, value=None, **kwargs) -> None: + super(AdditionalColumns, self).__init__(**kwargs) + self.name = name + self.value = value + + class LinkedService(Model): """The Azure Data Factory nested object which contains the information and credential which can be used to connect with related store or compute resource. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AzureFunctionLinkedService, + sub-classes are: SnowflakeLinkedService, AzureFunctionLinkedService, AzureDataExplorerLinkedService, SapTableLinkedService, GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, DynamicsAXLinkedService, ResponsysLinkedService, @@ -380,7 +402,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 
'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 
'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: @@ -492,28 +514,29 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, - OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, - ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, - VerticaTableDataset, NetezzaTableDataset, ZohoObjectDataset, - XeroObjectDataset, SquareObjectDataset, SparkObjectDataset, - ShopifyObjectDataset, ServiceNowObjectDataset, QuickBooksObjectDataset, - PrestoObjectDataset, PhoenixObjectDataset, PaypalObjectDataset, - MarketoObjectDataset, AzureMariaDBTableDataset, MariaDBTableDataset, - MagentoObjectDataset, JiraObjectDataset, ImpalaObjectDataset, - HubspotObjectDataset, HiveObjectDataset, HBaseObjectDataset, - GreenplumTableDataset, GoogleBigQueryObjectDataset, EloquaObjectDataset, - DrillTableDataset, CouchbaseTableDataset, ConcurObjectDataset, - AzurePostgreSqlTableDataset, AmazonMWSObjectDataset, HttpDataset, - AzureSearchIndexDataset, WebTableDataset, SapTableResourceDataset, - RestResourceDataset, SqlServerTableDataset, SapOpenHubTableDataset, - SapHanaTableDataset, SapEccResourceDataset, - SapCloudForCustomerResourceDataset, SapBwCubeDataset, SybaseTableDataset, - SalesforceServiceCloudObjectDataset, SalesforceObjectDataset, - MicrosoftAccessTableDataset, PostgreSqlTableDataset, MySqlTableDataset, - OdbcTableDataset, InformixTableDataset, RelationalTableDataset, - Db2TableDataset, AmazonRedshiftTableDataset, AzureMySqlTableDataset, - TeradataTableDataset, OracleTableDataset, ODataResourceDataset, + sub-classes are: SnowflakeDataset, GoogleAdWordsObjectDataset, + AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset, + DynamicsAXResourceDataset, ResponsysObjectDataset, + SalesforceMarketingCloudObjectDataset, VerticaTableDataset, + NetezzaTableDataset, ZohoObjectDataset, XeroObjectDataset, + SquareObjectDataset, SparkObjectDataset, ShopifyObjectDataset, + ServiceNowObjectDataset, QuickBooksObjectDataset, PrestoObjectDataset, + PhoenixObjectDataset, PaypalObjectDataset, MarketoObjectDataset, + AzureMariaDBTableDataset, MariaDBTableDataset, MagentoObjectDataset, + JiraObjectDataset, ImpalaObjectDataset, HubspotObjectDataset, + HiveObjectDataset, HBaseObjectDataset, 
GreenplumTableDataset, + GoogleBigQueryObjectDataset, EloquaObjectDataset, DrillTableDataset, + CouchbaseTableDataset, ConcurObjectDataset, AzurePostgreSqlTableDataset, + AmazonMWSObjectDataset, HttpDataset, AzureSearchIndexDataset, + WebTableDataset, SapTableResourceDataset, RestResourceDataset, + SqlServerTableDataset, SapOpenHubTableDataset, SapHanaTableDataset, + SapEccResourceDataset, SapCloudForCustomerResourceDataset, + SapBwCubeDataset, SybaseTableDataset, SalesforceServiceCloudObjectDataset, + SalesforceObjectDataset, MicrosoftAccessTableDataset, + PostgreSqlTableDataset, MySqlTableDataset, OdbcTableDataset, + InformixTableDataset, RelationalTableDataset, Db2TableDataset, + AmazonRedshiftTableDataset, AzureMySqlTableDataset, TeradataTableDataset, + OracleTableDataset, ODataResourceDataset, CosmosDbMongoDbApiCollectionDataset, MongoDbV2CollectionDataset, MongoDbCollectionDataset, FileShareDataset, Office365Dataset, AzureBlobFSDataset, AzureDataLakeStoreDataset, @@ -573,7 +596,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 
'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 
'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: @@ -653,14 +676,15 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, - Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, - WebSource, OracleSource, AzureDataExplorerSource, HdfsSource, - FileSystemSource, RestSource, SalesforceServiceCloudSource, ODataSource, - MicrosoftAccessSource, RelationalSource, CommonDataServiceForAppsSource, - DynamicsCrmSource, DynamicsSource, CosmosDbSqlApiSource, - DocumentDbCollectionSource, BlobSource, TabularSource, BinarySource, - OrcSource, JsonSource, DelimitedTextSource, ParquetSource, AvroSource + sub-classes are: SnowflakeSource, HttpSource, AzureBlobFSSource, + AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, + MongoDbV2Source, MongoDbSource, WebSource, OracleSource, + AzureDataExplorerSource, HdfsSource, FileSystemSource, RestSource, + SalesforceServiceCloudSource, ODataSource, MicrosoftAccessSource, + RelationalSource, CommonDataServiceForAppsSource, DynamicsCrmSource, + DynamicsSource, CosmosDbSqlApiSource, DocumentDbCollectionSource, + BlobSource, TabularSource, BinarySource, OrcSource, JsonSource, + DelimitedTextSource, ParquetSource, AvroSource All required parameters must be populated in order to send to Azure. 
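Reviewer note: each new Snowflake entry in the hunks above and below works through msrest's discriminator-based polymorphism, where the wire-level `type` value is looked up in `_subtype_map` to pick the concrete model class. The sketch below is a simplified, self-contained stand-in for that mechanism, not msrest's actual deserializer:

```python
# Simplified illustration of the discriminator lookup that _subtype_map drives.
# This is a stand-in for msrest's polymorphic deserializer, not its real code.
_subtype_map = {
    'Snowflake': 'SnowflakeLinkedService',
    'AzureFunction': 'AzureFunctionLinkedService',
}

class LinkedService:
    """Polymorphic base; used when the discriminator is unknown."""

class SnowflakeLinkedService(LinkedService):
    """Concrete model registered by this patch under 'Snowflake'."""

class AzureFunctionLinkedService(LinkedService):
    """Pre-existing concrete model."""

_registry = {cls.__name__: cls
             for cls in (SnowflakeLinkedService, AzureFunctionLinkedService)}

def resolve(payload: dict) -> type:
    # Look up the wire-level 'type' value, falling back to the base class.
    return _registry.get(_subtype_map.get(payload.get('type'), ''), LinkedService)

assert resolve({'type': 'Snowflake'}) is SnowflakeLinkedService
assert resolve({'type': 'SomethingElse'}) is LinkedService
```

The same one-line map additions ('SnowflakeTable' for datasets, 'SnowflakeSource' for copy sources, 'SnowflakeSink' for sinks below) are all that is needed to route REST payloads to the new models.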
@@ -695,7 +719,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} + 'type': {'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'AvroSource': 'AvroSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -748,6 +772,11 @@ class TabularSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -761,15 +790,17 @@ class TabularSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } _subtype_map = { 'type': {'AmazonRedshiftSource': 'AmazonRedshiftSource', 'GoogleAdWordsSource': 'GoogleAdWordsSource', 'OracleServiceCloudSource': 'OracleServiceCloudSource', 'DynamicsAXSource': 'DynamicsAXSource', 'ResponsysSource': 'ResponsysSource', 'SalesforceMarketingCloudSource': 'SalesforceMarketingCloudSource', 'VerticaSource': 'VerticaSource', 'NetezzaSource': 'NetezzaSource', 'ZohoSource': 'ZohoSource', 'XeroSource': 'XeroSource', 'SquareSource': 'SquareSource', 'SparkSource': 'SparkSource', 'ShopifySource': 'ShopifySource', 'ServiceNowSource': 'ServiceNowSource', 'QuickBooksSource': 'QuickBooksSource', 'PrestoSource': 'PrestoSource', 'PhoenixSource': 'PhoenixSource', 'PaypalSource': 'PaypalSource', 'MarketoSource': 'MarketoSource', 'AzureMariaDBSource': 'AzureMariaDBSource', 'MariaDBSource': 'MariaDBSource', 'MagentoSource': 'MagentoSource', 'JiraSource': 'JiraSource', 'ImpalaSource': 'ImpalaSource', 'HubspotSource': 'HubspotSource', 'HiveSource': 'HiveSource', 'HBaseSource': 'HBaseSource', 'GreenplumSource': 'GreenplumSource', 'GoogleBigQuerySource': 'GoogleBigQuerySource', 'EloquaSource': 'EloquaSource', 'DrillSource': 'DrillSource', 'CouchbaseSource': 'CouchbaseSource', 'ConcurSource': 'ConcurSource', 'AzurePostgreSqlSource': 'AzurePostgreSqlSource', 'AmazonMWSSource': 'AmazonMWSSource', 'CassandraSource': 'CassandraSource', 'TeradataSource': 'TeradataSource', 'AzureMySqlSource': 'AzureMySqlSource', 'SqlDWSource': 'SqlDWSource', 'SqlMISource': 'SqlMISource', 'AzureSqlSource': 'AzureSqlSource', 'SqlServerSource': 'SqlServerSource', 'SqlSource': 'SqlSource', 'SapTableSource': 'SapTableSource', 'SapOpenHubSource': 'SapOpenHubSource', 'SapHanaSource': 'SapHanaSource', 'SapEccSource': 'SapEccSource', 'SapCloudForCustomerSource': 'SapCloudForCustomerSource', 'SalesforceSource': 'SalesforceSource', 'SapBwSource': 'SapBwSource', 'SybaseSource': 'SybaseSource', 'PostgreSqlSource': 'PostgreSqlSource', 'MySqlSource': 'MySqlSource', 'OdbcSource': 'OdbcSource', 'Db2Source': 'Db2Source', 'InformixSource': 'InformixSource', 'AzureTableSource': 'AzureTableSource'} } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(TabularSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'TabularSource' @@ -798,6 +829,11 @@ class AmazonMWSSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. 
Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -814,11 +850,12 @@ class AmazonMWSSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(AmazonMWSSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AmazonMWSSource' @@ -922,6 +959,11 @@ class AmazonRedshiftSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -944,12 +986,13 @@ class AmazonRedshiftSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'redshift_unload_settings': {'key': 'redshiftUnloadSettings', 'type': 'RedshiftUnloadSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, redshift_unload_settings=None, **kwargs) -> None: - super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, redshift_unload_settings=None, **kwargs) -> None: + super(AmazonRedshiftSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.redshift_unload_settings = redshift_unload_settings self.type = 'AmazonRedshiftSource' @@ -1347,6 +1390,10 @@ class AmazonS3ReadSettings(StoreReadSettings): :param prefix: The prefix filter for the S3 object name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool @@ -1370,17 +1417,19 @@ class AmazonS3ReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AmazonS3ReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -1642,7 +1691,7 @@ class CopySink(Model): SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, OrcSink, @@ -1690,7 +1739,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 
'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -1784,6 +1833,11 @@ class AvroSource(CopySource): :type type: str :param store_settings: Avro store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -1797,11 +1851,13 @@ class AvroSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(AvroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.additional_columns = additional_columns self.type = 'AvroSource' @@ -2249,6 +2305,10 @@ class AzureBlobFSReadSettings(StoreReadSettings): :param wildcard_file_name: Azure blobFS wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). 
+ :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -2271,16 +2331,18 @@ class AzureBlobFSReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureBlobFSReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -2638,6 +2700,10 @@ class AzureBlobStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Azure Blob name. Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. 
:type enable_partition_discovery: bool @@ -2661,17 +2727,19 @@ 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureBlobStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -3142,6 +3210,11 @@ class AzureDataExplorerSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -3158,13 +3231,15 @@ 'query': {'key': 'query', 'type': 'object'}, 'no_truncation': {'key': 'noTruncation', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, query, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, no_truncation=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(AzureDataExplorerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.no_truncation = no_truncation self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'AzureDataExplorerSource' @@ -3533,6 +3608,10 @@ class AzureDataLakeStoreReadSettings(StoreReadSettings): :param wildcard_file_name: ADLS wildcardFileName. Type: string (or Expression with resultType string).
:type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -3555,16 +3634,18 @@ 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureDataLakeStoreReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -3690,6 +3771,11 @@ class AzureDataLakeStoreWriteSettings(StoreWriteSettings): :type copy_behavior: object :param type: Required. Constant filled by server. :type type: str + :param expiry_date_time: Specifies the expiry time of the written files. + The time is applied to the UTC time zone in the format of + "2018-12-01T05:00:00Z". Default value is NULL. Type: string (or + Expression with resultType string). + :type expiry_date_time: object """ _validation = { @@ -3701,10 +3787,12 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'expiry_date_time': {'key': 'expiryDateTime', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, expiry_date_time=None, **kwargs) -> None: super(AzureDataLakeStoreWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) + self.expiry_date_time = expiry_date_time self.type = 'AzureDataLakeStoreWriteSettings' @@ -3828,6 +3916,10 @@ class AzureFileStorageReadSettings(StoreReadSettings): :param wildcard_file_name: Azure File Storage wildcardFileName.
Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -3850,16 +3942,18 @@ class AzureFileStorageReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -4194,6 +4288,11 @@ class AzureMariaDBSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -4210,11 +4309,12 @@ class AzureMariaDBSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(AzureMariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AzureMariaDBSource' @@ -4834,6 +4934,11 @@ class AzureMySqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -4850,11 +4955,12 @@ class AzureMySqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(AzureMySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AzureMySqlSource' @@ -5061,6 +5167,11 @@ class AzurePostgreSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -5077,11 +5188,12 @@ class AzurePostgreSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(AzurePostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'AzurePostgreSqlSource' @@ -5852,6 +5964,11 @@ class AzureSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -5878,14 +5995,15 @@ class AzureSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(AzureSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -6189,6 +6307,11 @@ class AzureTableSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param azure_table_source_query: Azure Table source query. Type: string (or Expression with resultType string). 
:type azure_table_source_query: object @@ -6209,12 +6332,13 @@ class AzureTableSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'azure_table_source_query': {'key': 'azureTableSourceQuery', 'type': 'object'}, 'azure_table_source_ignore_table_not_found': {'key': 'azureTableSourceIgnoreTableNotFound', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: - super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, azure_table_source_query=None, azure_table_source_ignore_table_not_found=None, **kwargs) -> None: + super(AzureTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.azure_table_source_query = azure_table_source_query self.azure_table_source_ignore_table_not_found = azure_table_source_ignore_table_not_found self.type = 'AzureTableSource' @@ -6458,7 +6582,7 @@ class Trigger(Model): pipeline run. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: RerunTumblingWindowTrigger, ChainingTrigger, + sub-classes are: ChainingTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger, MultiplePipelineTrigger Variables are only populated by the server, and will be ignored when @@ -6497,7 +6621,7 @@ class Trigger(Model): } _subtype_map = { - 'type': {'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'ChainingTrigger': 'ChainingTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} + 'type': {'ChainingTrigger': 'ChainingTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger'} } def __init__(self, *, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: @@ -6930,6 +7054,11 @@ class CassandraSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command. Type: string (or Expression with resultType string). 
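Reviewer note: the two per-source additions repeated through the hunks above compose as follows. A minimal sketch with placeholder query, column, and path values; the constructor keywords match the signatures shown in this diff:

```python
# Minimal sketch of the new additionalColumns and fileListPath knobs.
from azure.mgmt.datafactory.models import (
    AdditionalColumns,
    AzureBlobStorageReadSettings,
    AzureSqlSource,
)

# Tag each copied row with extra columns; values may be literals or ADF
# expressions evaluated per pipeline run.
source = AzureSqlSource(
    sql_reader_query='SELECT id, name FROM dbo.Customers',
    additional_columns=[
        AdditionalColumns(name='load_run_id', value='@pipeline().RunId'),
        AdditionalColumns(name='source_system', value='crm'),
    ],
)

# Copy an explicit manifest of files instead of using wildcard matching:
# fileListPath names a text file of paths relative to the dataset's path.
read_settings = AzureBlobStorageReadSettings(
    file_list_path='manifests/files-to-copy.txt',
)
```

Serialization then emits the camelCase keys ('additionalColumns', 'fileListPath') declared in the `_attribute_map` entries above.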
@@ -6957,12 +7086,13 @@ class CassandraSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'consistency_level': {'key': 'consistencyLevel', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, consistency_level=None, **kwargs) -> None: - super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, consistency_level=None, **kwargs) -> None: + super(CassandraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.consistency_level = consistency_level self.type = 'CassandraSource' @@ -7495,6 +7625,11 @@ class CommonDataServiceForAppsSource(CopySource): Microsoft Common Data Service for Apps (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -7508,11 +7643,13 @@ class CommonDataServiceForAppsSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(CommonDataServiceForAppsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'CommonDataServiceForAppsSource' @@ -7712,6 +7849,11 @@ class ConcurSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. 
Type: string (or Expression with resultType string). :type query: object @@ -7728,11 +7870,12 @@ class ConcurSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ConcurSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ConcurSource' @@ -7790,10 +7933,19 @@ class CopyActivity(ExecutionActivity): settings when EnableSkipIncompatibleRow is true. :type redirect_incompatible_row_settings: ~azure.mgmt.datafactory.models.RedirectIncompatibleRowSettings + :param log_storage_settings: Log storage settings that the customer needs + to provide when enabling the session log. + :type log_storage_settings: + ~azure.mgmt.datafactory.models.LogStorageSettings :param preserve_rules: Preserve Rules. :type preserve_rules: list[object] :param preserve: Preserve rules. :type preserve: list[object] + :param validate_data_consistency: Whether to enable Data Consistency + validation. Type: boolean (or Expression with resultType boolean). + :type validate_data_consistency: object + :param skip_error_file: Specifies the fault tolerance for data consistency. + :type skip_error_file: ~azure.mgmt.datafactory.models.SkipErrorFile :param inputs: List of inputs for the activity. :type inputs: list[~azure.mgmt.datafactory.models.DatasetReference] :param outputs: List of outputs for the activity.
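The CopyActivity gains session logging and data-consistency knobs alongside the existing skip-incompatible-row settings; the constructor change in the next hunk threads them through. A hedged sketch of how they might be combined — the constructor arguments of LogStorageSettings (linked_service_name, path) and SkipErrorFile (file_missing, data_inconsistency) are not shown in this diff and are assumptions:

    from azure.mgmt.datafactory.models import (
        BlobSink, BlobSource, CopyActivity, LinkedServiceReference,
        LogStorageSettings, SkipErrorFile)

    copy = CopyActivity(
        name='CopyWithConsistencyValidation',
        source=BlobSource(),
        sink=BlobSink(),
        # New: verify that source and sink files match after the copy ...
        validate_data_consistency=True,
        # ... skipping, rather than failing on, missing/inconsistent files.
        skip_error_file=SkipErrorFile(file_missing=True,         # assumed field
                                      data_inconsistency=True),  # assumed field
        # New: where the session log is written once logging is enabled.
        log_storage_settings=LogStorageSettings(                 # assumed fields
            linked_service_name=LinkedServiceReference(reference_name='LogStore'),
            path='copy-session-logs'),
    )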
@@ -7825,13 +7977,16 @@ class CopyActivity(ExecutionActivity): 'data_integration_units': {'key': 'typeProperties.dataIntegrationUnits', 'type': 'object'}, 'enable_skip_incompatible_row': {'key': 'typeProperties.enableSkipIncompatibleRow', 'type': 'object'}, 'redirect_incompatible_row_settings': {'key': 'typeProperties.redirectIncompatibleRowSettings', 'type': 'RedirectIncompatibleRowSettings'}, + 'log_storage_settings': {'key': 'typeProperties.logStorageSettings', 'type': 'LogStorageSettings'}, 'preserve_rules': {'key': 'typeProperties.preserveRules', 'type': '[object]'}, 'preserve': {'key': 'typeProperties.preserve', 'type': '[object]'}, + 'validate_data_consistency': {'key': 'typeProperties.validateDataConsistency', 'type': 'object'}, + 'skip_error_file': {'key': 'typeProperties.skipErrorFile', 'type': 'SkipErrorFile'}, 'inputs': {'key': 'inputs', 'type': '[DatasetReference]'}, 'outputs': {'key': 'outputs', 'type': '[DatasetReference]'}, } - def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, preserve_rules=None, preserve=None, inputs=None, outputs=None, **kwargs) -> None: + def __init__(self, *, name: str, source, sink, additional_properties=None, description: str=None, depends_on=None, user_properties=None, linked_service_name=None, policy=None, translator=None, enable_staging=None, staging_settings=None, parallel_copies=None, data_integration_units=None, enable_skip_incompatible_row=None, redirect_incompatible_row_settings=None, log_storage_settings=None, preserve_rules=None, preserve=None, validate_data_consistency=None, skip_error_file=None, inputs=None, outputs=None, **kwargs) -> None: super(CopyActivity, self).__init__(additional_properties=additional_properties, name=name, description=description, depends_on=depends_on, user_properties=user_properties, linked_service_name=linked_service_name, policy=policy, **kwargs) self.source = source self.sink = sink @@ -7842,8 +7997,11 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.data_integration_units = data_integration_units self.enable_skip_incompatible_row = enable_skip_incompatible_row self.redirect_incompatible_row_settings = redirect_incompatible_row_settings + self.log_storage_settings = log_storage_settings self.preserve_rules = preserve_rules self.preserve = preserve + self.validate_data_consistency = validate_data_consistency + self.skip_error_file = skip_error_file self.inputs = inputs self.outputs = outputs self.type = 'Copy' @@ -8126,6 +8284,11 @@ class CosmosDbMongoDbApiSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -8142,14 +8305,16 @@ class CosmosDbMongoDbApiSource(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(CosmosDbMongoDbApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'CosmosDbMongoDbApiSource' @@ -8299,6 +8464,11 @@ class CosmosDbSqlApiSource(CopySource): :param preferred_regions: Preferred regions. Type: array of strings (or Expression with resultType array of strings). :type preferred_regions: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -8314,13 +8484,15 @@ class CosmosDbSqlApiSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'page_size': {'key': 'pageSize', 'type': 'object'}, 'preferred_regions': {'key': 'preferredRegions', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, page_size=None, preferred_regions=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, page_size=None, preferred_regions=None, additional_columns=None, **kwargs) -> None: super(CosmosDbSqlApiSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.page_size = page_size self.preferred_regions = preferred_regions + self.additional_columns = additional_columns self.type = 'CosmosDbSqlApiSource' @@ -8407,6 +8579,11 @@ class CouchbaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -8423,11 +8600,12 @@ class CouchbaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(CouchbaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'CouchbaseSource' @@ -9925,37 +10103,47 @@ class Db2LinkedService(LinkedService): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param server: Required. Server name for connection. Type: string (or - Expression with resultType string). + :param connection_string: The connection string. It is mutually exclusive + with the server, database, authenticationType, userName, packageCollection + and certificateCommonName properties. Type: string, SecureString or + AzureKeyVaultSecretReference. + :type connection_string: object + :param server: Server name for connection. It is mutually exclusive with + connectionString property. Type: string (or Expression with resultType + string). :type server: object - :param database: Required. Database name for connection. Type: string (or - Expression with resultType string). + :param database: Database name for connection. It is mutually exclusive + with connectionString property. Type: string (or Expression with + resultType string). :type database: object :param authentication_type: AuthenticationType to be used for connection. - Possible values include: 'Basic' + It is mutually exclusive with connectionString property. Possible values + include: 'Basic' :type authentication_type: str or ~azure.mgmt.datafactory.models.Db2AuthenticationType - :param username: Username for authentication. Type: string (or Expression - with resultType string). + :param username: Username for authentication. It is mutually exclusive + with connectionString property. Type: string (or Expression with + resultType string). :type username: object :param password: Password for authentication. :type password: ~azure.mgmt.datafactory.models.SecretBase :param package_collection: Under where packages are created when querying - database. Type: string (or Expression with resultType string). + database. It is mutually exclusive with connectionString property. Type: + string (or Expression with resultType string). :type package_collection: object :param certificate_common_name: Certificate Common Name when TLS is - enabled.
It is mutually exclusive with connectionString property. Type: + string (or Expression with resultType string). :type certificate_common_name: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime - credential manager. Type: string (or Expression with resultType string). + credential manager. It is mutually exclusive with connectionString + property. Type: string (or Expression with resultType string). :type encrypted_credential: object """ _validation = { 'type': {'required': True}, - 'server': {'required': True}, - 'database': {'required': True}, } _attribute_map = { @@ -9965,6 +10153,7 @@ class Db2LinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, 'server': {'key': 'typeProperties.server', 'type': 'object'}, 'database': {'key': 'typeProperties.database', 'type': 'object'}, 'authentication_type': {'key': 'typeProperties.authenticationType', 'type': 'str'}, @@ -9975,8 +10164,9 @@ class Db2LinkedService(LinkedService): 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, server, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, authentication_type=None, username=None, password=None, package_collection=None, certificate_common_name=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, server=None, database=None, authentication_type=None, username=None, password=None, package_collection=None, certificate_common_name=None, encrypted_credential=None, **kwargs) -> None: super(Db2LinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string self.server = server self.database = database self.authentication_type = authentication_type @@ -10013,6 +10203,11 @@ class Db2Source(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object @@ -10029,11 +10224,12 @@ class Db2Source(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(Db2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'Db2Source' @@ -10459,6 +10655,11 @@ class DelimitedTextSource(CopySource): :param format_settings: DelimitedText format settings. :type format_settings: ~azure.mgmt.datafactory.models.DelimitedTextReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -10473,12 +10674,14 @@ class DelimitedTextSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, 'format_settings': {'key': 'formatSettings', 'type': 'DelimitedTextReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, additional_columns=None, **kwargs) -> None: super(DelimitedTextSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings self.format_settings = format_settings + self.additional_columns = additional_columns self.type = 'DelimitedTextSource' @@ -10739,6 +10942,11 @@ class DocumentDbCollectionSource(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -10754,13 +10962,15 @@ class DocumentDbCollectionSource(CopySource): 'query': {'key': 'query', 'type': 'object'}, 'nesting_separator': {'key': 'nestingSeparator', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, nesting_separator=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(DocumentDbCollectionSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.nesting_separator = nesting_separator self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'DocumentDbCollectionSource' @@ -10846,6 +11056,11 @@ class DrillSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -10862,11 +11077,12 @@ class DrillSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(DrillSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'DrillSource' @@ -11157,6 +11373,11 @@ class DynamicsAXSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -11173,11 +11394,12 @@ class DynamicsAXSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(DynamicsAXSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'DynamicsAXSource' @@ -11463,6 +11685,11 @@ class DynamicsCrmSource(CopySource): Microsoft Dynamics CRM (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -11476,11 +11703,13 @@ class DynamicsCrmSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(DynamicsCrmSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'DynamicsCrmSource' @@ -11762,6 +11991,11 @@ class DynamicsSource(CopySource): Microsoft Dynamics (online & on-premises). Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -11775,11 +12009,13 @@ class DynamicsSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(DynamicsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'DynamicsSource' @@ -11947,6 +12183,11 @@ class EloquaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -11963,11 +12204,12 @@ class EloquaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(EloquaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'EloquaSource' @@ -12280,6 +12522,40 @@ def __init__(self, *, name: str, package_location, connect_via, additional_prope self.type = 'ExecuteSSISPackage' +class ExportSettings(Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeExportCopyCommand + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + class ExposureControlRequest(Model): """The exposure control request. @@ -12821,6 +13097,10 @@ class FileServerReadSettings(StoreReadSettings): :param wildcard_file_name: FileServer wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -12843,16 +13123,18 @@ 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(FileServerReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -13060,6 +13342,11 @@ class FileSystemSource(CopySource): recursively. Default is true. Type: boolean (or Expression with resultType boolean). :type recursive: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects).
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -13073,11 +13360,13 @@ class FileSystemSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'recursive': {'key': 'recursive', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, recursive=None, additional_columns=None, **kwargs) -> None: super(FileSystemSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive + self.additional_columns = additional_columns self.type = 'FileSystemSource' @@ -13215,6 +13504,10 @@ class FtpReadSettings(StoreReadSettings): :param wildcard_file_name: Ftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param use_binary_transfer: Specify whether to use binary transfer mode for FTP stores. :type use_binary_transfer: bool @@ -13231,14 +13524,16 @@ class FtpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'use_binary_transfer': {'key': 'useBinaryTransfer', 'type': 'bool'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, use_binary_transfer: bool=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, use_binary_transfer: bool=None, **kwargs) -> None: super(FtpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.use_binary_transfer = use_binary_transfer self.type = 'FtpReadSettings' @@ -13695,6 +13990,11 @@ class GoogleAdWordsSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -13711,11 +14011,12 @@ class GoogleAdWordsSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(GoogleAdWordsSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'GoogleAdWordsSource' @@ -13927,6 +14228,11 @@ class GoogleBigQuerySource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -13943,11 +14249,12 @@ class GoogleBigQuerySource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(GoogleBigQuerySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'GoogleBigQuerySource' @@ -14089,6 +14396,10 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): :param prefix: The prefix filter for the Google Cloud Storage object name. 
Type: string (or Expression with resultType string). :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -14112,17 +14423,19 @@ class GoogleCloudStorageReadSettings(StoreReadSettings): 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name self.prefix = prefix + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -14211,6 +14524,11 @@ class GreenplumSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -14227,11 +14545,12 @@ class GreenplumSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(GreenplumSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'GreenplumSource' @@ -14493,6 +14812,11 @@ class HBaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -14509,11 +14833,12 @@ class HBaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(HBaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'HBaseSource' @@ -14645,6 +14970,10 @@ class HdfsReadSettings(StoreReadSettings): :param wildcard_file_name: HDFS wildcardFileName. Type: string (or Expression with resultType string). 
:type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param enable_partition_discovery: Indicates whether to enable partition discovery. :type enable_partition_discovery: bool @@ -14669,17 +14998,19 @@ class HdfsReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, 'distcp_settings': {'key': 'distcpSettings', 'type': 'DistcpSettings'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, distcp_settings=None, **kwargs) -> None: super(HdfsReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.enable_partition_discovery = enable_partition_discovery self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end @@ -15721,6 +16052,11 @@ class HiveSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -15737,11 +16073,12 @@ class HiveSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(HiveSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'HiveSource' @@ -16234,6 +16571,11 @@ class HubspotSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16250,11 +16592,12 @@ class HubspotSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(HubspotSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'HubspotSource' @@ -16519,6 +16862,11 @@ class ImpalaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -16535,15 +16883,50 @@ class ImpalaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ImpalaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ImpalaSource' +class ImportSettings(Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeImportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + class InformixLinkedService(LinkedService): """Informix linked service. @@ -16697,6 +17080,11 @@ class InformixSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string).
:type query: object @@ -16713,11 +17101,12 @@ class InformixSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(InformixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'InformixSource' @@ -17676,6 +18065,11 @@ class JiraSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -17692,11 +18086,12 @@ class JiraSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(JiraSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'JiraSource' @@ -17924,6 +18319,11 @@ class JsonSource(CopySource): :type type: str :param store_settings: Json store settings. 
:type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -17937,11 +18337,13 @@ class JsonSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(JsonSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.additional_columns = additional_columns self.type = 'JsonSource' @@ -18490,6 +18892,11 @@ class MagentoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18506,11 +18913,12 @@ class MagentoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(MagentoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MagentoSource' @@ -18901,6 +19309,11 @@ class MariaDBSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
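JsonSource now carries additional_columns alongside store_settings. A sketch combining the two, where AzureBlobStorageReadSettings and its keyword names are assumptions drawn from the StoreReadSettings family rather than from this hunk:

    # Sketch: a JSON source reading from blob storage, with one injected column.
    from azure.mgmt.datafactory.models import (
        AdditionalColumns, AzureBlobStorageReadSettings, JsonSource)

    source = JsonSource(
        store_settings=AzureBlobStorageReadSettings(  # assumed StoreReadSettings subclass
            recursive=True,
            wildcard_file_name="*.json",
        ),
        additional_columns=[AdditionalColumns(name="file_batch", value="nightly")],
    )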
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -18917,11 +19330,12 @@ class MariaDBSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(MariaDBSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MariaDBSource' @@ -19149,6 +19563,11 @@ class MarketoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -19165,11 +19584,12 @@ class MarketoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(MarketoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MarketoSource' @@ -19326,6 +19746,11 @@ class MicrosoftAccessSource(CopySource): :param query: Database query. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19339,11 +19764,13 @@ class MicrosoftAccessSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(MicrosoftAccessSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'MicrosoftAccessSource' @@ -19630,6 +20057,11 @@ class MongoDbSource(CopySource): :param query: Database query. Should be a SQL-92 query expression. Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19643,11 +20075,13 @@ class MongoDbSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(MongoDbSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'MongoDbSource' @@ -19804,6 +20238,11 @@ class MongoDbV2Source(CopySource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -19820,14 +20259,16 @@ class MongoDbV2Source(CopySource): 'cursor_methods': {'key': 'cursorMethods', 'type': 'MongoDbCursorMethodsProperties'}, 'batch_size': {'key': 'batchSize', 'type': 'object'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, filter=None, cursor_methods=None, batch_size=None, query_timeout=None, additional_columns=None, **kwargs) -> None: super(MongoDbV2Source, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.filter = filter self.cursor_methods = cursor_methods self.batch_size = batch_size self.query_timeout = query_timeout + self.additional_columns = additional_columns self.type = 'MongoDbV2Source' @@ -19914,6 +20355,11 @@ class MySqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). 
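MongoDbV2Source gains the same additional_columns knob next to its existing filter and cursor options. A sketch, with the MongoDbCursorMethodsProperties keywords (project, limit) assumed from that model's usual shape:

    # Sketch: a filtered MongoDB v2 read with a projected cursor and an extra column.
    from azure.mgmt.datafactory.models import (
        AdditionalColumns, MongoDbCursorMethodsProperties, MongoDbV2Source)

    source = MongoDbV2Source(
        filter='{"status": "active"}',  # server-side filter document
        cursor_methods=MongoDbCursorMethodsProperties(project='{"_id": 0}', limit=1000),
        batch_size=256,
        query_timeout="00:30:00",
        additional_columns=[AdditionalColumns(name="region", value="emea")],
    )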
:type query: object @@ -19930,11 +20376,12 @@ class MySqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(MySqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'MySqlSource' @@ -20111,6 +20558,11 @@ class NetezzaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
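The _attribute_map entries show that the snake_case field serializes to the camelCase wire name additionalColumns. A sketch of the serialized body, assuming msrest's Serializer:

    # Sketch: snake_case model fields serialize to the camelCase keys in _attribute_map.
    from msrest import Serializer
    from azure.mgmt.datafactory import models

    client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
    source = models.MySqlSource(
        query="SELECT id, name FROM customers",
        additional_columns=[models.AdditionalColumns(name="lineage", value="mysql")],
    )
    body = Serializer(client_models).body(source, "MySqlSource")
    # expected keys: 'type' == 'MySqlSource', 'query', 'additionalColumns'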
:type query: object @@ -20136,13 +20588,14 @@ class NetezzaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'NetezzaPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(NetezzaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.partition_option = partition_option self.partition_settings = partition_settings @@ -20418,6 +20871,11 @@ class ODataSource(CopySource): :param query: OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -20431,11 +20889,13 @@ class ODataSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(ODataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'ODataSource' @@ -20592,6 +21052,11 @@ class OdbcSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
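NetezzaSource keeps its partitioned-read options while adding additional_columns, which __init__ now places before query. A sketch, with the NetezzaPartitionSettings keywords and the partition_option value assumed:

    # Sketch: a range-partitioned Netezza read plus one injected column.
    from azure.mgmt.datafactory.models import (
        AdditionalColumns, NetezzaPartitionSettings, NetezzaSource)

    source = NetezzaSource(
        query="SELECT * FROM sales",
        partition_option="DynamicRange",              # assumed enum value
        partition_settings=NetezzaPartitionSettings(  # assumed keyword names
            partition_column_name="sale_id",
            partition_lower_bound="1",
            partition_upper_bound="1000000",
        ),
        additional_columns=[AdditionalColumns(name="load_id", value="42")],
    )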
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -20608,11 +21073,12 @@ class OdbcSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(OdbcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'OdbcSource' @@ -21355,6 +21821,11 @@ class OracleServiceCloudSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -21371,11 +21842,12 @@ class OracleServiceCloudSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(OracleServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'OracleServiceCloudSource' @@ -21471,6 +21943,11 @@ class OracleSource(CopySource): source partitioning. :type partition_settings: ~azure.mgmt.datafactory.models.OraclePartitionSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21487,14 +21964,16 @@ class OracleSource(CopySource): 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'OraclePartitionSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, oracle_reader_query=None, query_timeout=None, partition_option=None, partition_settings=None, additional_columns=None, **kwargs) -> None: super(OracleSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.oracle_reader_query = oracle_reader_query self.query_timeout = query_timeout self.partition_option = partition_option self.partition_settings = partition_settings + self.additional_columns = additional_columns self.type = 'OracleSource' @@ -21744,6 +22223,11 @@ class OrcSource(CopySource): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21757,11 +22241,13 @@ class OrcSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(OrcSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.additional_columns = additional_columns self.type = 'OrcSource' @@ -21966,6 +22452,11 @@ class ParquetSource(CopySource): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreReadSettings + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -21979,11 +22470,13 @@ class ParquetSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreReadSettings'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, store_settings=None, additional_columns=None, **kwargs) -> None: super(ParquetSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.additional_columns = additional_columns self.type = 'ParquetSource' @@ -22152,6 +22645,11 @@ class PaypalSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
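OrcSource and ParquetSource pick up the field in the same way. For context, a sketch of where such a source sits inside a copy activity; CopyActivity, DatasetReference, and BlobSink are assumed from the broader models package:

    # Sketch: wiring a ParquetSource into a copy activity.
    from azure.mgmt.datafactory.models import (
        AdditionalColumns, BlobSink, CopyActivity, DatasetReference, ParquetSource)

    activity = CopyActivity(
        name="CopyParquetToBlob",
        inputs=[DatasetReference(reference_name="InputParquetDataset")],
        outputs=[DatasetReference(reference_name="OutputBlobDataset")],
        source=ParquetSource(
            # '$$FILEPATH' as a reserved column value is an assumption here
            additional_columns=[AdditionalColumns(name="source_file", value="$$FILEPATH")],
        ),
        sink=BlobSink(),
    )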
:type query: object @@ -22168,11 +22666,12 @@ class PaypalSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(PaypalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'PaypalSource' @@ -22381,6 +22880,11 @@ class PhoenixSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -22397,11 +22901,12 @@ class PhoenixSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(PhoenixSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'PhoenixSource' @@ -22811,6 +23316,11 @@ class PostgreSqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -22827,11 +23337,12 @@ class PostgreSqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(PostgreSqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'PostgreSqlSource' @@ -23121,6 +23632,11 @@ class PrestoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -23137,11 +23653,12 @@ class PrestoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(PrestoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'PrestoSource' @@ -23319,6 +23836,11 @@ class QuickBooksSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23335,11 +23857,12 @@ class QuickBooksSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(QuickBooksSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'QuickBooksSource' @@ -23503,6 +24026,11 @@ class RelationalSource(CopySource): :param query: Database query. Type: string (or Expression with resultType string). 
:type query: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -23516,11 +24044,13 @@ class RelationalSource(CopySource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, additional_columns=None, **kwargs) -> None: super(RelationalSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query + self.additional_columns = additional_columns self.type = 'RelationalSource' @@ -23584,48 +24114,6 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'RelationalTable' -class RerunTriggerResource(SubResource): - """RerunTrigger resource type. - - Variables are only populated by the server, and will be ignored when - sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Properties of the rerun trigger. - :type properties: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTrigger - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'RerunTumblingWindowTrigger'}, - } - - def __init__(self, *, properties, **kwargs) -> None: - super(RerunTriggerResource, self).__init__(**kwargs) - self.properties = properties - - class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -23650,7 +24138,7 @@ class RerunTumblingWindowTrigger(Trigger): :type annotations: list[object] :param type: Required. Constant filled by server. :type type: str - :param parent_trigger: The parent trigger reference. + :param parent_trigger: Required. The parent trigger reference. :type parent_trigger: object :param requested_start_time: Required. The start time for the time period for which restatement is initiated. Only UTC time is currently supported. @@ -23658,17 +24146,18 @@ class RerunTumblingWindowTrigger(Trigger): :param requested_end_time: Required. The end time for the time period for which restatement is initiated. Only UTC time is currently supported. 
:type requested_end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int + :param rerun_concurrency: Required. The max number of parallel time + windows (ready for execution) for which a rerun is triggered. + :type rerun_concurrency: int """ _validation = { 'runtime_state': {'readonly': True}, 'type': {'required': True}, + 'parent_trigger': {'required': True}, 'requested_start_time': {'required': True}, 'requested_end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { @@ -23680,53 +24169,18 @@ class RerunTumblingWindowTrigger(Trigger): 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, } - def __init__(self, *, requested_start_time, requested_end_time, max_concurrency: int, additional_properties=None, description: str=None, annotations=None, parent_trigger=None, **kwargs) -> None: + def __init__(self, *, parent_trigger, requested_start_time, requested_end_time, rerun_concurrency: int, additional_properties=None, description: str=None, annotations=None, **kwargs) -> None: super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) self.parent_trigger = parent_trigger self.requested_start_time = requested_start_time self.requested_end_time = requested_end_time - self.max_concurrency = max_concurrency + self.rerun_concurrency = rerun_concurrency self.type = 'RerunTumblingWindowTrigger' -class RerunTumblingWindowTriggerActionParameters(Model): - """Rerun tumbling window trigger Parameters. - - All required parameters must be populated in order to send to Azure. - - :param start_time: Required. The start time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type start_time: datetime - :param end_time: Required. The end time for the time period for which - restatement is initiated. Only UTC time is currently supported. - :type end_time: datetime - :param max_concurrency: Required. The max number of parallel time windows - (ready for execution) for which a rerun is triggered. - :type max_concurrency: int - """ - - _validation = { - 'start_time': {'required': True}, - 'end_time': {'required': True}, - 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, - } - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, - 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, - 'max_concurrency': {'key': 'maxConcurrency', 'type': 'int'}, - } - - def __init__(self, *, start_time, end_time, max_concurrency: int, **kwargs) -> None: - super(RerunTumblingWindowTriggerActionParameters, self).__init__(**kwargs) - self.start_time = start_time - self.end_time = end_time - self.max_concurrency = max_concurrency - - class ResponsysLinkedService(LinkedService): """Responsys linked service. 
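Note the rename from max_concurrency to rerun_concurrency (wire key typeProperties.rerunConcurrency) and that parent_trigger is now required. A sketch under those constraints; the dict shape passed for parent_trigger (typed as plain object) is an assumption:

    # Sketch: rerun a week of tumbling windows, at most 10 in parallel (validation allows 1-50).
    from datetime import datetime
    from azure.mgmt.datafactory.models import RerunTumblingWindowTrigger

    rerun = RerunTumblingWindowTrigger(
        parent_trigger={  # assumed trigger-reference shape; the parameter is typed 'object'
            "referenceName": "DailyTumblingWindowTrigger",
            "type": "TriggerReference",
        },
        requested_start_time=datetime(2020, 3, 1),  # UTC only, per the docstring
        requested_end_time=datetime(2020, 3, 8),
        rerun_concurrency=10,
    )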
@@ -23894,6 +24348,11 @@ class ResponsysSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -23910,11 +24369,12 @@ class ResponsysSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ResponsysSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ResponsysSource' @@ -24138,6 +24598,11 @@ class RestSource(CopySource): :param request_interval: The time to await before sending next page request. :type request_interval: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -24156,9 +24621,10 @@ class RestSource(CopySource): 'pagination_rules': {'key': 'paginationRules', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, request_method=None, request_body=None, additional_headers=None, pagination_rules=None, http_request_timeout=None, request_interval=None, additional_columns=None, **kwargs) -> None: super(RestSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.request_body = request_body @@ -24166,6 +24632,7 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.pagination_rules = pagination_rules self.http_request_timeout = http_request_timeout self.request_interval = request_interval + self.additional_columns = additional_columns self.type = 'RestSource' @@ -24345,9 +24812,12 @@ class SalesforceLinkedService(LinkedService): :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string + (or Expression with resultType string). + :type api_version: object :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). 
@@ -24369,15 +24839,17 @@ class SalesforceLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, api_version=None, encrypted_credential=None, **kwargs) -> None: super(SalesforceLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.environment_url = environment_url self.username = username self.password = password self.security_token = security_token + self.api_version = api_version self.encrypted_credential = encrypted_credential self.type = 'Salesforce' @@ -24546,6 +25018,11 @@ class SalesforceMarketingCloudSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
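Both Salesforce linked services gain an api_version property, and security_token is now documented as optional. A sketch, using SecureString (a SecretBase subclass elsewhere in the models package) as an assumption for the secret values:

    # Sketch: a Salesforce linked service pinned to a specific API version.
    from azure.mgmt.datafactory.models import SalesforceLinkedService, SecureString

    linked_service = SalesforceLinkedService(
        environment_url="https://login.salesforce.com",
        username="integration-user@example.com",
        password=SecureString(value="<password>"),
        security_token=SecureString(value="<token>"),  # optional as of this change
        api_version="47.0",                            # hypothetical version string
    )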
:type query: object @@ -24562,11 +25039,12 @@ class SalesforceMarketingCloudSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SalesforceMarketingCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SalesforceMarketingCloudSource' @@ -24664,9 +25142,12 @@ class SalesforceServiceCloudLinkedService(LinkedService): :param password: The password for Basic authentication of the Salesforce instance. :type password: ~azure.mgmt.datafactory.models.SecretBase - :param security_token: The security token is required to remotely access + :param security_token: The security token is optional to remotely access Salesforce instance. :type security_token: ~azure.mgmt.datafactory.models.SecretBase + :param api_version: The Salesforce API version used in ADF. Type: string + (or Expression with resultType string). + :type api_version: object :param extended_properties: Extended properties appended to the connection string. Type: string (or Expression with resultType string). 
:type extended_properties: object @@ -24691,16 +25172,18 @@ class SalesforceServiceCloudLinkedService(LinkedService): 'username': {'key': 'typeProperties.username', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'security_token': {'key': 'typeProperties.securityToken', 'type': 'SecretBase'}, + 'api_version': {'key': 'typeProperties.apiVersion', 'type': 'object'}, 'extended_properties': {'key': 'typeProperties.extendedProperties', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, environment_url=None, username=None, password=None, security_token=None, api_version=None, extended_properties=None, encrypted_credential=None, **kwargs) -> None: super(SalesforceServiceCloudLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.environment_url = environment_url self.username = username self.password = password self.security_token = security_token + self.api_version = api_version self.extended_properties = extended_properties self.encrypted_credential = encrypted_credential self.type = 'SalesforceServiceCloud' @@ -24866,6 +25349,11 @@ class SalesforceServiceCloudSource(CopySource): Query. Possible values include: 'Query', 'QueryAll' :type read_behavior: str or ~azure.mgmt.datafactory.models.SalesforceSourceReadBehavior + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -24880,12 +25368,14 @@ class SalesforceServiceCloudSource(CopySource): 'type': {'key': 'type', 'type': 'str'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, read_behavior=None, additional_columns=None, **kwargs) -> None: super(SalesforceServiceCloudSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.query = query self.read_behavior = read_behavior + self.additional_columns = additional_columns self.type = 'SalesforceServiceCloudSource' @@ -24986,6 +25476,11 @@ class SalesforceSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. 
Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -25006,12 +25501,13 @@ class SalesforceSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'read_behavior': {'key': 'readBehavior', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, read_behavior=None, **kwargs) -> None: - super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, read_behavior=None, **kwargs) -> None: + super(SalesforceSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.read_behavior = read_behavior self.type = 'SalesforceSource' @@ -25173,6 +25669,11 @@ class SapBwSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: MDX query. Type: string (or Expression with resultType string). 
:type query: object @@ -25189,11 +25690,12 @@ class SapBwSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SapBwSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SapBwSource' @@ -25403,6 +25905,11 @@ class SapCloudForCustomerSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -25419,11 +25926,12 @@ class SapCloudForCustomerSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SapCloudForCustomerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SapCloudForCustomerSource' @@ -25578,6 +26086,11 @@ class SapEccSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
:type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType string). :type query: object @@ -25594,11 +26107,12 @@ class SapEccSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SapEccSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SapEccSource' @@ -25719,6 +26233,11 @@ class SapHanaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: SAP HANA Sql query. Type: string (or Expression with resultType string). 
:type query: object @@ -25747,14 +26266,15 @@ class SapHanaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'packet_size': {'key': 'packetSize', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'SapHanaPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, packet_size=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, packet_size=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapHanaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.packet_size = packet_size self.partition_option = partition_option @@ -25940,6 +26460,11 @@ class SapOpenHubSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param exclude_last_request: Whether to exclude the records of the last request. The default value is true. Type: boolean (or Expression with resultType boolean). 
@@ -25962,12 +26487,13 @@ class SapOpenHubSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'exclude_last_request': {'key': 'excludeLastRequest', 'type': 'object'}, 'base_request_id': {'key': 'baseRequestId', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: - super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, exclude_last_request=None, base_request_id=None, **kwargs) -> None: + super(SapOpenHubSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.exclude_last_request = exclude_last_request self.base_request_id = base_request_id self.type = 'SapOpenHubSource' @@ -26297,6 +26823,11 @@ class SapTableSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param row_count: The number of rows to be retrieved. Type: integer (or Expression with resultType integer).
:type row_count: object @@ -26342,6 +26873,7 @@ class SapTableSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'row_count': {'key': 'rowCount', 'type': 'object'}, 'row_skips': {'key': 'rowSkips', 'type': 'object'}, 'rfc_table_fields': {'key': 'rfcTableFields', 'type': 'object'}, @@ -26352,8 +26884,8 @@ class SapTableSource(TabularSource): 'partition_settings': {'key': 'partitionSettings', 'type': 'SapTablePartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, row_count=None, row_skips=None, rfc_table_fields=None, rfc_table_options=None, batch_size=None, custom_rfc_read_table_function_module=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(SapTableSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.row_count = row_count self.row_skips = row_skips self.rfc_table_fields = rfc_table_fields @@ -27032,6 +27564,11 @@ class ServiceNowSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
:type query: object @@ -27048,11 +27585,12 @@ class ServiceNowSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ServiceNowSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ServiceNowSource' @@ -27162,6 +27700,10 @@ class SftpReadSettings(StoreReadSettings): :param wildcard_file_name: Sftp wildcardFileName. Type: string (or Expression with resultType string). :type wildcard_file_name: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object :param modified_datetime_start: The start of file's modified datetime. Type: string (or Expression with resultType string). :type modified_datetime_start: object @@ -27181,15 +27723,17 @@ class SftpReadSettings(StoreReadSettings): 'recursive': {'key': 'recursive', 'type': 'object'}, 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, file_list_path=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: super(SftpReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) self.recursive = recursive self.wildcard_folder_path = wildcard_folder_path self.wildcard_file_name = wildcard_file_name + self.file_list_path = file_list_path self.modified_datetime_start = modified_datetime_start self.modified_datetime_end = modified_datetime_end self.type = 'SftpReadSettings' @@ -27322,6 +27866,10 @@ class SftpWriteSettings(StoreWriteSettings): SFTP server. Default value: 01:00:00 (one hour). 
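# Editor's sketch (illustrative, not part of the generated patch): using the
# new fileListPath setting so the copy enumerates the files named in a text
# file, relative to the dataset path, instead of matching wildcards. The path
# below is a placeholder.
from azure.mgmt.datafactory.models import SftpReadSettings

read_settings = SftpReadSettings(
    recursive=False,
    file_list_path='staging/files-to-copy.txt',
)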
Type: string (or Expression with resultType string). :type operation_timeout: object + :param use_temp_file_rename: Upload to temporary file(s) and rename. + Disable this option if your SFTP server doesn't support the rename operation. + Type: boolean (or Expression with resultType boolean). + :type use_temp_file_rename: object """ _validation = { @@ -27334,11 +27882,13 @@ class SftpWriteSettings(StoreWriteSettings): 'copy_behavior': {'key': 'copyBehavior', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'operation_timeout': {'key': 'operationTimeout', 'type': 'object'}, + 'use_temp_file_rename': {'key': 'useTempFileRename', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, operation_timeout=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, copy_behavior=None, operation_timeout=None, use_temp_file_rename=None, **kwargs) -> None: super(SftpWriteSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, copy_behavior=copy_behavior, **kwargs) self.operation_timeout = operation_timeout + self.use_temp_file_rename = use_temp_file_rename self.type = 'SftpWriteSettings' @@ -27501,6 +28051,11 @@ class ShopifySource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27517,15 +28072,356 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ShopifySource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ShopifySource' + +class SkipErrorFile(Model): + """Skip error file. + + :param file_missing: Skip if the file is deleted by another client during + the copy. Default is true. Type: boolean (or Expression with resultType boolean). + :type file_missing: object + :param data_inconsistency: Skip if the source/sink file is changed by another + concurrent write. Default is false.
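# Editor's sketch (illustrative, not part of the generated patch): disabling
# the new useTempFileRename flag on SFTP write settings for servers that
# reject the rename operation; the timeout reuses the documented default.
from azure.mgmt.datafactory.models import SftpWriteSettings

write_settings = SftpWriteSettings(
    operation_timeout='01:00:00',
    use_temp_file_rename=False,
)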
Type: boolean (or + resultType boolean). + :type data_inconsistency: object + """ + + _attribute_map = { + 'file_missing': {'key': 'fileMissing', 'type': 'object'}, + 'data_inconsistency': {'key': 'dataInconsistency', 'type': 'object'}, + } + + def __init__(self, *, file_missing=None, data_inconsistency=None, **kwargs) -> None: + super(SkipErrorFile, self).__init__(**kwargs) + self.file_missing = file_missing + self.data_inconsistency = data_inconsistency + + +class SnowflakeDataset(Dataset): + """The Snowflake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param snowflake_dataset_schema: The schema name of the Snowflake + database. Type: string (or Expression with resultType string). + :type snowflake_dataset_schema: object + :param table: The table name of the Snowflake database. Type: string (or + Expression with resultType string).
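# Editor's sketch (illustrative, not part of the generated patch): building a
# SkipErrorFile with its documented defaults spelled out. Wiring it into a
# copy activity's fault-tolerance settings is assumed and not shown here.
from azure.mgmt.datafactory.models import SkipErrorFile

skip_error_file = SkipErrorFile(
    file_missing=True,         # tolerate files deleted by another client mid-copy
    data_inconsistency=False,  # fail if a file changes under a concurrent write
)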
+ :type table: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'snowflake_dataset_schema': {'key': 'typeProperties.schema', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, snowflake_dataset_schema=None, table=None, **kwargs) -> None: + super(SnowflakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.snowflake_dataset_schema = snowflake_dataset_schema + self.table = table + self.type = 'SnowflakeTable' + + +class SnowflakeExportCopyCommand(ExportSettings): + """Snowflake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to + Snowflake Copy Command. Type: key value pairs (value should be string + type) (or Expression with resultType object). Example: + "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" } + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly + passed to Snowflake Copy Command. Type: key value pairs (value should be + string type) (or Expression with resultType object). Example: + "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": + "'FALSE'" } + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__(self, *, additional_properties=None, additional_copy_options=None, additional_format_options=None, **kwargs) -> None: + super(SnowflakeExportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.additional_copy_options = additional_copy_options + self.additional_format_options = additional_format_options + self.type = 'SnowflakeExportCopyCommand' + + +class SnowflakeImportCopyCommand(ImportSettings): + """Snowflake import command settings. + + All required parameters must be populated in order to send to Azure.
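# Editor's sketch (illustrative, not part of the generated patch): a minimal
# Snowflake dataset. Note the Python property snowflake_dataset_schema
# serializes to typeProperties.schema; the linked service name, schema, and
# table identifiers are placeholders.
from azure.mgmt.datafactory.models import (
    LinkedServiceReference, SnowflakeDataset)

snowflake_dataset = SnowflakeDataset(
    linked_service_name=LinkedServiceReference(reference_name='SnowflakeLinkedService'),
    snowflake_dataset_schema='PUBLIC',
    table='CUSTOMERS',
)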
+ + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param additional_copy_options: Additional copy options directly passed to + Snowflake Copy Command. Type: key value pairs (value should be string + type) (or Expression with resultType object). Example: + "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": + "'HH24:MI:SS.FF'" } + :type additional_copy_options: dict[str, object] + :param additional_format_options: Additional format options directly + passed to Snowflake Copy Command. Type: key value pairs (value should be + string type) (or Expression with resultType object). Example: + "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": + "'FALSE'" } + :type additional_format_options: dict[str, object] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'additional_copy_options': {'key': 'additionalCopyOptions', 'type': '{object}'}, + 'additional_format_options': {'key': 'additionalFormatOptions', 'type': '{object}'}, + } + + def __init__(self, *, additional_properties=None, additional_copy_options=None, additional_format_options=None, **kwargs) -> None: + super(SnowflakeImportCopyCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.additional_copy_options = additional_copy_options + self.additional_format_options = additional_format_options + self.type = 'SnowflakeImportCopyCommand' + + +class SnowflakeLinkedService(LinkedService): + """Snowflake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param connection_string: Required. The connection string of Snowflake. + Type: string, SecureString. + :type connection_string: object + :param password: The Azure key vault secret reference of password in + connection string. + :type password: + ~azure.mgmt.datafactory.models.AzureKeyVaultSecretReference + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string).
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'connection_string': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'AzureKeyVaultSecretReference'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(SnowflakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'Snowflake' + + +class SnowflakeSink(CopySink): + """A copy activity Snowflake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param import_settings: Snowflake import settings.
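# Editor's sketch (illustrative, not part of the generated patch): a Snowflake
# linked service with the password kept in Key Vault. The connection string
# format and all names are placeholders, and AzureKeyVaultSecretReference is
# assumed to take a store reference plus a secret name, as elsewhere in this SDK.
from azure.mgmt.datafactory.models import (
    AzureKeyVaultSecretReference, LinkedServiceReference, SnowflakeLinkedService)

snowflake_linked_service = SnowflakeLinkedService(
    connection_string='jdbc:snowflake://<account>.snowflakecomputing.com/?db=MYDB&warehouse=MYWH',
    password=AzureKeyVaultSecretReference(
        store=LinkedServiceReference(reference_name='KeyVaultLinkedService'),
        secret_name='snowflake-password',
    ),
)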
+ :type import_settings: + ~azure.mgmt.datafactory.models.SnowflakeImportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'SnowflakeImportCopyCommand'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, import_settings=None, **kwargs) -> None: + super(SnowflakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + self.type = 'SnowflakeSink' + + +class SnowflakeSource(CopySource): + """A copy activity Snowflake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized to this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Snowflake Sql query. Type: string (or Expression with + resultType string). + :type query: object + :param export_settings: Snowflake export settings.
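# Editor's sketch (illustrative, not part of the generated patch): a Snowflake
# sink pairing a pre-copy script with import settings. The option values reuse
# the examples from the docstrings above; the script and table are placeholders.
from azure.mgmt.datafactory.models import (
    SnowflakeImportCopyCommand, SnowflakeSink)

snowflake_sink = SnowflakeSink(
    pre_copy_script='TRUNCATE TABLE PUBLIC.CUSTOMERS',
    import_settings=SnowflakeImportCopyCommand(
        additional_copy_options={'DATE_FORMAT': 'MM/DD/YYYY'},
        additional_format_options={'FORCE': 'TRUE'},
    ),
)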
+ :type export_settings: + ~azure.mgmt.datafactory.models.SnowflakeExportCopyCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'SnowflakeExportCopyCommand'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, export_settings=None, **kwargs) -> None: + super(SnowflakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.export_settings = export_settings + self.type = 'SnowflakeSource' + + class SparkLinkedService(LinkedService): """Spark Server linked service. @@ -27740,6 +28636,11 @@ class SparkSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -27756,11 +28657,12 @@ class SparkSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SparkSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SparkSource' @@ -27872,6 +28774,11 @@ class SqlDWSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
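# Editor's sketch (illustrative, not part of the generated patch): a Snowflake
# source pairing the query with export settings. The option values reuse the
# docstring examples; the query is a placeholder.
from azure.mgmt.datafactory.models import (
    SnowflakeExportCopyCommand, SnowflakeSource)

snowflake_source = SnowflakeSource(
    query='SELECT * FROM PUBLIC.CUSTOMERS',
    export_settings=SnowflakeExportCopyCommand(
        additional_copy_options={'DATE_FORMAT': 'MM/DD/YYYY'},
        additional_format_options={'OVERWRITE': 'TRUE'},
    ),
)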
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL Data Warehouse reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -27897,13 +28804,14 @@ class SqlDWSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: + super(SqlDWSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -28016,6 +28924,11 @@ class SqlMISource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -28042,14 +28955,15 @@ class SqlMISource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlMISource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -28225,6 +29139,11 @@ class SqlServerSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). 
:type sql_reader_query: object @@ -28251,14 +29170,15 @@ class SqlServerSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, 'produce_additional_types': {'key': 'produceAdditionalTypes', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: - super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, produce_additional_types=None, **kwargs) -> None: + super(SqlServerSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters @@ -28500,6 +29420,11 @@ class SqlSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param sql_reader_query: SQL reader query. Type: string (or Expression with resultType string). :type sql_reader_query: object @@ -28511,6 +29436,12 @@ class SqlSource(TabularSource): procedure parameters. Example: "{Parameter1: {value: "1", type: "int"}}". :type stored_procedure_parameters: dict[str, ~azure.mgmt.datafactory.models.StoredProcedureParameter] + :param isolation_level: Specifies the transaction locking behavior for the + SQL source. Allowed values: + ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The + default value is ReadCommitted. Type: string (or Expression with + resultType string). 
+ :type isolation_level: object """ _validation = { @@ -28524,16 +29455,19 @@ class SqlSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'sql_reader_query': {'key': 'sqlReaderQuery', 'type': 'object'}, 'sql_reader_stored_procedure_name': {'key': 'sqlReaderStoredProcedureName', 'type': 'object'}, 'stored_procedure_parameters': {'key': 'storedProcedureParameters', 'type': '{StoredProcedureParameter}'}, + 'isolation_level': {'key': 'isolationLevel', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, **kwargs) -> None: - super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, sql_reader_query=None, sql_reader_stored_procedure_name=None, stored_procedure_parameters=None, isolation_level=None, **kwargs) -> None: + super(SqlSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.sql_reader_query = sql_reader_query self.sql_reader_stored_procedure_name = sql_reader_stored_procedure_name self.stored_procedure_parameters = stored_procedure_parameters + self.isolation_level = isolation_level self.type = 'SqlSource' @@ -28708,6 +29642,11 @@ class SquareSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
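# Editor's sketch (illustrative, not part of the generated patch): setting the
# new isolationLevel property on SqlSource with one of the documented allowed
# values; the reader query is a placeholder.
from azure.mgmt.datafactory.models import SqlSource

sql_source = SqlSource(
    sql_reader_query='SELECT * FROM dbo.Orders',
    isolation_level='ReadUncommitted',
)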
:type query: object @@ -28724,11 +29663,12 @@ class SquareSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SquareSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SquareSource' @@ -29617,6 +30557,11 @@ class SybaseSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Database query. Type: string (or Expression with resultType string). :type query: object @@ -29633,11 +30578,12 @@ class SybaseSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(SybaseSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'SybaseSource' @@ -29829,6 +30775,11 @@ class TeradataSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. 
Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: Teradata query. Type: string (or Expression with resultType string). :type query: object @@ -29854,13 +30805,14 @@ 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, 'partition_option': {'key': 'partitionOption', 'type': 'str'}, 'partition_settings': {'key': 'partitionSettings', 'type': 'TeradataPartitionSettings'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: - super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, partition_option=None, partition_settings=None, **kwargs) -> None: + super(TeradataSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.partition_option = partition_option self.partition_settings = partition_settings @@ -30053,6 +31005,28 @@ def __init__(self, *, reference_trigger, **kwargs) -> None: self.type = 'TriggerDependencyReference' + + +class TriggerFilterParameters(Model): + """Query parameters for triggers. + + :param continuation_token: The continuation token for getting the next + page of results. Null for the first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger + to get the child rerun triggers for. + :type parent_trigger_name: str + """ + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, + } + + def __init__(self, *, continuation_token: str=None, parent_trigger_name: str=None, **kwargs) -> None: + super(TriggerFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.parent_trigger_name = parent_trigger_name + + class TriggerPipelineReference(Model): """Pipeline that needs to be triggered with the given parameters. @@ -30073,6 +31047,33 @@ def __init__(self, *, pipeline_reference=None, parameters=None, **kwargs) -> Non self.parameters = parameters + + +class TriggerQueryResponse(Model): + """A query of triggers. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param continuation_token: The continuation token for getting the next + page of results, if any remaining results exist, null otherwise.
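# Editor's sketch (illustrative, not part of the generated patch): building
# the new trigger query filter. Passing the parent TumblingWindowTrigger name
# scopes the query to its child rerun triggers, and the matching
# TriggerQueryResponse carries the continuation token for the next page.
# The trigger name is a placeholder.
from azure.mgmt.datafactory.models import TriggerFilterParameters

filter_parameters = TriggerFilterParameters(
    continuation_token=None,  # None requests the first page
    parent_trigger_name='DailyTumblingWindowTrigger',
)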
+ :type continuation_token: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + } + + def __init__(self, *, value, continuation_token: str=None, **kwargs) -> None: + super(TriggerQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token + + class TriggerReference(Model): """Trigger reference type. @@ -30770,6 +31771,11 @@ class VerticaSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -30786,11 +31792,12 @@ class VerticaSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(VerticaSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'VerticaSource' @@ -31345,6 +32352,11 @@ class WebSource(CopySource): :type max_concurrent_connections: object :param type: Required. Constant filled by server. :type type: str + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). 
+ :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] """ _validation = { @@ -31357,10 +32369,12 @@ class WebSource(CopySource): 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, additional_columns=None, **kwargs) -> None: super(WebSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.additional_columns = additional_columns self.type = 'WebSource' @@ -31596,6 +32610,11 @@ class XeroSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). :type query: object @@ -31612,11 +32631,12 @@ class XeroSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(XeroSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'XeroSource' @@ -31779,6 +32799,11 @@ class ZohoSource(TabularSource): resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). :type query_timeout: object + :param additional_columns: Specifies the additional columns to be added to + source data. Type: array of objects (or Expression with resultType array + of objects). + :type additional_columns: + list[~azure.mgmt.datafactory.models.AdditionalColumns] :param query: A query to retrieve data from source. Type: string (or Expression with resultType string). 
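
The additionalColumns property added to the sources in the hunks above follows a single pattern: the tabular sources forward it to the TabularSource base initializer, while WebSource stores it directly on the instance. A minimal usage sketch, assuming the AdditionalColumns model accepts name and value keywords matching the service's additionalColumns schema (the query text and column values are placeholders):

    from azure.mgmt.datafactory.models import AdditionalColumns, ZohoSource

    # Tag every copied row with a constant column plus a column computed
    # from a pipeline expression at copy time.
    source = ZohoSource(
        query="SELECT * FROM Accounts",  # placeholder query
        additional_columns=[
            AdditionalColumns(name="batch_label", value="march-backfill"),
            AdditionalColumns(
                name="run_id",
                # ADF expression objects use this {"value", "type"} shape.
                value={"value": "@pipeline().RunId", "type": "Expression"},
            ),
        ],
    )

The same keyword works on any of the sources updated in this patch; the columns are appended to the copied data by the service, not evaluated client-side.
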
:type query: object @@ -31795,10 +32820,11 @@ class ZohoSource(TabularSource): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'query_timeout': {'key': 'queryTimeout', 'type': 'object'}, + 'additional_columns': {'key': 'additionalColumns', 'type': '[AdditionalColumns]'}, 'query': {'key': 'query', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, query=None, **kwargs) -> None: - super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, **kwargs) + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query_timeout=None, additional_columns=None, query=None, **kwargs) -> None: + super(ZohoSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, query_timeout=query_timeout, additional_columns=additional_columns, **kwargs) self.query = query self.type = 'ZohoSource' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py index f78455cfdb9a..9a46a2afb4ca 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py @@ -103,19 +103,6 @@ class TriggerResourcePaged(Paged): def __init__(self, *args, **kwargs): super(TriggerResourcePaged, self).__init__(*args, **kwargs) -class RerunTriggerResourcePaged(Paged): - """ - A paging container for iterating over a list of :class:`RerunTriggerResource ` object - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'current_page': {'key': 'value', 'type': '[RerunTriggerResource]'} - } - - def __init__(self, *args, **kwargs): - - super(RerunTriggerResourcePaged, self).__init__(*args, **kwargs) class DataFlowResourcePaged(Paged): """ A paging container for iterating over a list of :class:`DataFlowResource ` object diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index 619150f2d6a8..59e9feaff462 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -22,7 +22,6 @@ from ._activity_runs_operations import ActivityRunsOperations from ._triggers_operations import TriggersOperations from ._trigger_runs_operations import TriggerRunsOperations -from ._rerun_triggers_operations import RerunTriggersOperations from ._data_flows_operations import DataFlowsOperations from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations @@ -40,7 +39,6 @@ 'ActivityRunsOperations', 'TriggersOperations', 'TriggerRunsOperations', - 'RerunTriggersOperations', 'DataFlowsOperations', 'DataFlowDebugSessionOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py index 00201749beee..1f24fba6ac9c 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_pipelines_operations.py @@ -314,7 +314,7 @@ def delete( delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/pipelines/{pipelineName}'} def create_run( - self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, parameters=None, custom_headers=None, raw=False, **operation_config): + self, resource_group_name, factory_name, pipeline_name, reference_pipeline_run_id=None, is_recovery=None, start_activity_name=None, start_from_failure=None, parameters=None, custom_headers=None, raw=False, **operation_config): """Creates a run of a pipeline. :param resource_group_name: The resource group name. @@ -334,6 +334,10 @@ def create_run( :param start_activity_name: In recovery mode, the rerun will start from this activity. If not specified, all activities will run. :type start_activity_name: str + :param start_from_failure: In recovery mode, if set to true, the rerun + will start from failed activities. The property will be used only if + startActivityName is not specified. + :type start_from_failure: bool :param parameters: Parameters of the pipeline run. These parameters will be used only if the runId is not specified. :type parameters: dict[str, object] @@ -366,6 +370,8 @@ def create_run( query_parameters['isRecovery'] = self._serialize.query("is_recovery", is_recovery, 'bool') if start_activity_name is not None: query_parameters['startActivityName'] = self._serialize.query("start_activity_name", start_activity_name, 'str') + if start_from_failure is not None: + query_parameters['startFromFailure'] = self._serialize.query("start_from_failure", start_from_failure, 'bool') # Construct headers header_parameters = {} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py deleted file mode 100644 index 6d5f8e9831de..000000000000 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_rerun_triggers_operations.py +++ /dev/null @@ -1,453 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for -# license information. -# -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is -# regenerated. -# -------------------------------------------------------------------------- - -import uuid -from msrest.pipeline import ClientRawResponse -from msrestazure.azure_exceptions import CloudError -from msrest.polling import LROPoller, NoPolling -from msrestazure.polling.arm_polling import ARMPolling - -from .. import models - - -class RerunTriggersOperations(object): - """RerunTriggersOperations operations. - - You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. - - :param client: Client for service requests. 
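
The new startFromFailure flag on create_run (added in the _pipelines_operations.py hunk above) only takes effect in recovery mode and is ignored when startActivityName is supplied. A minimal recovery-rerun sketch; the credentials, subscription, and resource names are placeholders, and the printed run_id assumes the CreateRunResponse shape used elsewhere in this SDK:

    from azure.common.credentials import ServicePrincipalCredentials
    from azure.mgmt.datafactory import DataFactoryManagementClient

    credentials = ServicePrincipalCredentials(
        client_id="<app-id>", secret="<app-secret>", tenant="<tenant-id>")
    client = DataFactoryManagementClient(credentials, "<subscription-id>")

    # Rerun an earlier pipeline run, resuming only from its failed
    # activities; start_activity_name is deliberately omitted because
    # the service ignores startFromFailure when it is specified.
    run = client.pipelines.create_run(
        "myResourceGroup", "myFactory", "myPipeline",
        reference_pipeline_run_id="<previous-run-id>",
        is_recovery=True,
        start_from_failure=True,
    )
    print(run.run_id)
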
- :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. - :ivar api_version: The API version. Constant value: "2018-06-01". - """ - - models = models - - def __init__(self, client, config, serializer, deserializer): - - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self.api_version = "2018-06-01" - - self.config = config - - def create( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, rerun_tumbling_window_trigger_action_parameters, custom_headers=None, raw=False, **operation_config): - """Creates a rerun trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. - :type rerun_trigger_name: str - :param rerun_tumbling_window_trigger_action_parameters: Rerun tumbling - window trigger action parameters. - :type rerun_tumbling_window_trigger_action_parameters: - ~azure.mgmt.datafactory.models.RerunTumblingWindowTriggerActionParameters - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: TriggerResource or ClientRawResponse if raw=true - :rtype: ~azure.mgmt.datafactory.models.TriggerResource or - ~msrest.pipeline.ClientRawResponse - :raises: :class:`CloudError` - """ - # Construct URL - url = self.create.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - header_parameters['Content-Type'] = 'application/json; charset=utf-8' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct body - body_content = self._serialize.body(rerun_tumbling_window_trigger_action_parameters, 'RerunTumblingWindowTriggerActionParameters') - - # Construct and send request - request = self._client.put(url, query_parameters, header_parameters, body_content) - response = self._client.send(request, stream=False, 
**operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - deserialized = None - if response.status_code == 200: - deserialized = self._deserialize('TriggerResource', response) - - if raw: - client_raw_response = ClientRawResponse(deserialized, response) - return client_raw_response - - return deserialized - create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}'} - - - def _start_initial( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): - # Construct URL - url = self.start.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - def start( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Starts a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. 
- :type rerun_trigger_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns None or - ClientRawResponse if raw==True - :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] - :raises: :class:`CloudError` - """ - raw_result = self._start_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - rerun_trigger_name=rerun_trigger_name, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/start'} - - - def _stop_initial( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): - # Construct URL - url = self.stop.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - if raw: - client_raw_response = 
ClientRawResponse(None, response) - return client_raw_response - - def stop( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Stops a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. - :type rerun_trigger_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns None or - ClientRawResponse if raw==True - :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] - :raises: :class:`CloudError` - """ - raw_result = self._stop_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - rerun_trigger_name=rerun_trigger_name, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/stop'} - - - def _cancel_initial( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, **operation_config): - # Construct URL - url = self.cancel.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), - 'rerunTriggerName': self._serialize.url("rerun_trigger_name", rerun_trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - # Construct headers - header_parameters = {} - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if 
self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.post(url, query_parameters, header_parameters) - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - def cancel( - self, resource_group_name, factory_name, trigger_name, rerun_trigger_name, custom_headers=None, raw=False, polling=True, **operation_config): - """Cancels a trigger. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param rerun_trigger_name: The rerun trigger name. - :type rerun_trigger_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: The poller return type is ClientRawResponse, the - direct response alongside the deserialized response - :param polling: True for ARMPolling, False for no polling, or a - polling object for personal polling strategy - :return: An instance of LROPoller that returns None or - ClientRawResponse if raw==True - :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or - ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] - :raises: :class:`CloudError` - """ - raw_result = self._cancel_initial( - resource_group_name=resource_group_name, - factory_name=factory_name, - trigger_name=trigger_name, - rerun_trigger_name=rerun_trigger_name, - custom_headers=custom_headers, - raw=True, - **operation_config - ) - - def get_long_running_output(response): - if raw: - client_raw_response = ClientRawResponse(None, response) - return client_raw_response - - lro_delay = operation_config.get( - 'long_running_operation_timeout', - self.config.long_running_operation_timeout) - if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) - cancel.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers/{rerunTriggerName}/cancel'} - - def list_by_trigger( - self, resource_group_name, factory_name, trigger_name, custom_headers=None, raw=False, **operation_config): - """Lists rerun triggers by an original trigger name. - - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param trigger_name: The trigger name. - :type trigger_name: str - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. 
- :return: An iterator like instance of RerunTriggerResource - :rtype: - ~azure.mgmt.datafactory.models.RerunTriggerResourcePaged[~azure.mgmt.datafactory.models.RerunTriggerResource] - :raises: :class:`CloudError` - """ - def prepare_request(next_link=None): - if not next_link: - # Construct URL - url = self.list_by_trigger.metadata['url'] - path_format_arguments = { - 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), - 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), - 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), - 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$') - } - url = self._client.format_url(url, **path_format_arguments) - - # Construct parameters - query_parameters = {} - query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') - - else: - url = next_link - query_parameters = {} - - # Construct headers - header_parameters = {} - header_parameters['Accept'] = 'application/json' - if self.config.generate_client_request_id: - header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) - if custom_headers: - header_parameters.update(custom_headers) - if self.config.accept_language is not None: - header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') - - # Construct and send request - request = self._client.get(url, query_parameters, header_parameters) - return request - - def internal_paging(next_link=None): - request = prepare_request(next_link) - - response = self._client.send(request, stream=False, **operation_config) - - if response.status_code not in [200]: - exp = CloudError(response) - exp.request_id = response.headers.get('x-ms-request-id') - raise exp - - return response - - # Deserialize response - header_dict = None - if raw: - header_dict = {} - deserialized = models.RerunTriggerResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) - - return deserialized - list_by_trigger.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/rerunTriggers'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py index 57e31b1bd8c9..4554f5f7f71b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_triggers_operations.py @@ -113,6 +113,79 @@ def internal_paging(next_link=None): return deserialized list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} + def query_by_factory( + self, resource_group_name, factory_name, continuation_token=None, parent_trigger_name=None, custom_headers=None, raw=False, **operation_config): + """Query triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :param continuation_token: The continuation token for getting the next + page of results. Null for the first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent + TumblingWindowTrigger to get the child rerun triggers. + :type parent_trigger_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: TriggerQueryResponse or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token, parent_trigger_name=parent_trigger_name) + + # Construct URL + url = self.query_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(filter_parameters, 'TriggerFilterParameters') + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerQueryResponse', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} + def create_or_update( self, resource_group_name, factory_name, trigger_name, properties, if_match=None, custom_headers=None, raw=False, **operation_config): """Creates or updates a trigger.
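
With RerunTriggersOperations removed, listing the child rerun triggers of a tumbling window trigger is now served by the triggers.query_by_factory operation added above, which posts a TriggerFilterParameters body and returns a TriggerQueryResponse whose continuation_token is null once the last page has been served. A minimal paging sketch (resource names are placeholders; client is a DataFactoryManagementClient configured as in the create_run sketch earlier):

    # Page through all child rerun triggers of a tumbling window trigger.
    triggers, token = [], None
    while True:
        page = client.triggers.query_by_factory(
            "myResourceGroup", "myFactory",
            continuation_token=token,
            parent_trigger_name="myTumblingWindowTrigger",
        )
        triggers.extend(page.value)
        token = page.continuation_token
        if not token:
            break  # a null continuation token means no pages remain
    print([t.name for t in triggers])

Omitting parent_trigger_name queries every trigger in the factory, making this a body-driven alternative to the nextLink-based list_by_factory paging.
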