diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 95dc22d97aad..d0fa095742b1 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -48,6 +48,12 @@ from ._models_py3 import AzureBlobStorageLocation from ._models_py3 import AzureBlobStorageReadSettings from ._models_py3 import AzureBlobStorageWriteSettings + from ._models_py3 import AzureDatabricksDeltaLakeDataset + from ._models_py3 import AzureDatabricksDeltaLakeExportCommand + from ._models_py3 import AzureDatabricksDeltaLakeImportCommand + from ._models_py3 import AzureDatabricksDeltaLakeLinkedService + from ._models_py3 import AzureDatabricksDeltaLakeSink + from ._models_py3 import AzureDatabricksDeltaLakeSource from ._models_py3 import AzureDatabricksLinkedService from ._models_py3 import AzureDataExplorerCommandActivity from ._models_py3 import AzureDataExplorerLinkedService @@ -131,6 +137,7 @@ from ._models_py3 import CopyActivity from ._models_py3 import CopySink from ._models_py3 import CopySource + from ._models_py3 import CopyTranslator from ._models_py3 import CosmosDbLinkedService from ._models_py3 import CosmosDbMongoDbApiCollectionDataset from ._models_py3 import CosmosDbMongoDbApiLinkedService @@ -173,6 +180,7 @@ from ._models_py3 import Dataset from ._models_py3 import DatasetBZip2Compression from ._models_py3 import DatasetCompression + from ._models_py3 import DatasetDataElement from ._models_py3 import DatasetDebugResource from ._models_py3 import DatasetDeflateCompression from ._models_py3 import DatasetFolder @@ -180,7 +188,10 @@ from ._models_py3 import DatasetLocation from ._models_py3 import DatasetReference from ._models_py3 import DatasetResource + from ._models_py3 import DatasetSchemaDataElement from ._models_py3 import DatasetStorageFormat + from ._models_py3 import DatasetTarCompression + from ._models_py3 import DatasetTarGZipCompression from ._models_py3 import DatasetZipDeflateCompression from ._models_py3 import Db2LinkedService from ._models_py3 import Db2Source @@ -411,12 +422,14 @@ from ._models_py3 import OrcFormat from ._models_py3 import OrcSink from ._models_py3 import OrcSource + from ._models_py3 import OrcWriteSettings from ._models_py3 import PackageStore from ._models_py3 import ParameterSpecification from ._models_py3 import ParquetDataset from ._models_py3 import ParquetFormat from ._models_py3 import ParquetSink from ._models_py3 import ParquetSource + from ._models_py3 import ParquetWriteSettings from ._models_py3 import PaypalLinkedService from ._models_py3 import PaypalObjectDataset from ._models_py3 import PaypalSource @@ -567,6 +580,9 @@ from ._models_py3 import SybaseSource from ._models_py3 import SybaseTableDataset from ._models_py3 import TabularSource + from ._models_py3 import TabularTranslator + from ._models_py3 import TarGZipReadSettings + from ._models_py3 import TarReadSettings from ._models_py3 import TeradataLinkedService from ._models_py3 import TeradataPartitionSettings from ._models_py3 import TeradataSource @@ -585,6 +601,7 @@ from ._models_py3 import TriggerSubscriptionOperationStatus from ._models_py3 import TumblingWindowTrigger from ._models_py3 import TumblingWindowTriggerDependencyReference + from ._models_py3 import TypeConversionSettings from ._models_py3 import UntilActivity from ._models_py3 import 
UpdateIntegrationRuntimeNodeRequest from ._models_py3 import UpdateIntegrationRuntimeRequest @@ -655,6 +672,12 @@ from ._models import AzureBlobStorageLocation from ._models import AzureBlobStorageReadSettings from ._models import AzureBlobStorageWriteSettings + from ._models import AzureDatabricksDeltaLakeDataset + from ._models import AzureDatabricksDeltaLakeExportCommand + from ._models import AzureDatabricksDeltaLakeImportCommand + from ._models import AzureDatabricksDeltaLakeLinkedService + from ._models import AzureDatabricksDeltaLakeSink + from ._models import AzureDatabricksDeltaLakeSource from ._models import AzureDatabricksLinkedService from ._models import AzureDataExplorerCommandActivity from ._models import AzureDataExplorerLinkedService @@ -738,6 +761,7 @@ from ._models import CopyActivity from ._models import CopySink from ._models import CopySource + from ._models import CopyTranslator from ._models import CosmosDbLinkedService from ._models import CosmosDbMongoDbApiCollectionDataset from ._models import CosmosDbMongoDbApiLinkedService @@ -780,6 +804,7 @@ from ._models import Dataset from ._models import DatasetBZip2Compression from ._models import DatasetCompression + from ._models import DatasetDataElement from ._models import DatasetDebugResource from ._models import DatasetDeflateCompression from ._models import DatasetFolder @@ -787,7 +812,10 @@ from ._models import DatasetLocation from ._models import DatasetReference from ._models import DatasetResource + from ._models import DatasetSchemaDataElement from ._models import DatasetStorageFormat + from ._models import DatasetTarCompression + from ._models import DatasetTarGZipCompression from ._models import DatasetZipDeflateCompression from ._models import Db2LinkedService from ._models import Db2Source @@ -1018,12 +1046,14 @@ from ._models import OrcFormat from ._models import OrcSink from ._models import OrcSource + from ._models import OrcWriteSettings from ._models import PackageStore from ._models import ParameterSpecification from ._models import ParquetDataset from ._models import ParquetFormat from ._models import ParquetSink from ._models import ParquetSource + from ._models import ParquetWriteSettings from ._models import PaypalLinkedService from ._models import PaypalObjectDataset from ._models import PaypalSource @@ -1174,6 +1204,9 @@ from ._models import SybaseSource from ._models import SybaseTableDataset from ._models import TabularSource + from ._models import TabularTranslator + from ._models import TarGZipReadSettings + from ._models import TarReadSettings from ._models import TeradataLinkedService from ._models import TeradataPartitionSettings from ._models import TeradataSource @@ -1192,6 +1225,7 @@ from ._models import TriggerSubscriptionOperationStatus from ._models import TumblingWindowTrigger from ._models import TumblingWindowTriggerDependencyReference + from ._models import TypeConversionSettings from ._models import UntilActivity from ._models import UpdateIntegrationRuntimeNodeRequest from ._models import UpdateIntegrationRuntimeRequest @@ -1297,6 +1331,7 @@ SsisPackageLocationType, HDInsightActivityDebugInfoOption, SalesforceSinkWriteBehavior, + DynamicsSinkWriteBehavior, AzureSearchIndexWriteBehaviorType, PolybaseSettingsRejectType, JsonWriteFilePattern, @@ -1313,6 +1348,7 @@ IntegrationRuntimeEdition, SsisObjectMetadataType, IntegrationRuntimeAuthKeyName, + CopyBehaviorType, ) __all__ = [ @@ -1354,6 +1390,12 @@ 'AzureBlobStorageLocation', 'AzureBlobStorageReadSettings', 
'AzureBlobStorageWriteSettings', + 'AzureDatabricksDeltaLakeDataset', + 'AzureDatabricksDeltaLakeExportCommand', + 'AzureDatabricksDeltaLakeImportCommand', + 'AzureDatabricksDeltaLakeLinkedService', + 'AzureDatabricksDeltaLakeSink', + 'AzureDatabricksDeltaLakeSource', 'AzureDatabricksLinkedService', 'AzureDataExplorerCommandActivity', 'AzureDataExplorerLinkedService', @@ -1437,6 +1479,7 @@ 'CopyActivity', 'CopySink', 'CopySource', + 'CopyTranslator', 'CosmosDbLinkedService', 'CosmosDbMongoDbApiCollectionDataset', 'CosmosDbMongoDbApiLinkedService', @@ -1479,6 +1522,7 @@ 'Dataset', 'DatasetBZip2Compression', 'DatasetCompression', + 'DatasetDataElement', 'DatasetDebugResource', 'DatasetDeflateCompression', 'DatasetFolder', @@ -1486,7 +1530,10 @@ 'DatasetLocation', 'DatasetReference', 'DatasetResource', + 'DatasetSchemaDataElement', 'DatasetStorageFormat', + 'DatasetTarCompression', + 'DatasetTarGZipCompression', 'DatasetZipDeflateCompression', 'Db2LinkedService', 'Db2Source', @@ -1717,12 +1764,14 @@ 'OrcFormat', 'OrcSink', 'OrcSource', + 'OrcWriteSettings', 'PackageStore', 'ParameterSpecification', 'ParquetDataset', 'ParquetFormat', 'ParquetSink', 'ParquetSource', + 'ParquetWriteSettings', 'PaypalLinkedService', 'PaypalObjectDataset', 'PaypalSource', @@ -1873,6 +1922,9 @@ 'SybaseSource', 'SybaseTableDataset', 'TabularSource', + 'TabularTranslator', + 'TarGZipReadSettings', + 'TarReadSettings', 'TeradataLinkedService', 'TeradataPartitionSettings', 'TeradataSource', @@ -1891,6 +1943,7 @@ 'TriggerSubscriptionOperationStatus', 'TumblingWindowTrigger', 'TumblingWindowTriggerDependencyReference', + 'TypeConversionSettings', 'UntilActivity', 'UpdateIntegrationRuntimeNodeRequest', 'UpdateIntegrationRuntimeRequest', @@ -1995,6 +2048,7 @@ 'SsisPackageLocationType', 'HDInsightActivityDebugInfoOption', 'SalesforceSinkWriteBehavior', + 'DynamicsSinkWriteBehavior', 'AzureSearchIndexWriteBehaviorType', 'PolybaseSettingsRejectType', 'JsonWriteFilePattern', @@ -2011,4 +2065,5 @@ 'IntegrationRuntimeEdition', 'SsisObjectMetadataType', 'IntegrationRuntimeAuthKeyName', + 'CopyBehaviorType', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 4eb33cce9961..73144188ac4e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -320,6 +320,7 @@ class OrcCompressionCodec(str, Enum): none = "none" zlib = "zlib" snappy = "snappy" + lzo = "lzo" class AvroCompressionCodec(str, Enum): @@ -499,6 +500,11 @@ class SalesforceSinkWriteBehavior(str, Enum): upsert = "Upsert" +class DynamicsSinkWriteBehavior(str, Enum): + + upsert = "Upsert" + + class AzureSearchIndexWriteBehaviorType(str, Enum): merge = "Merge" @@ -605,3 +611,10 @@ class IntegrationRuntimeAuthKeyName(str, Enum): auth_key1 = "authKey1" auth_key2 = "authKey2" + + +class CopyBehaviorType(str, Enum): + + preserve_hierarchy = "PreserveHierarchy" + flatten_hierarchy = "FlattenHierarchy" + merge_files = "MergeFiles" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index d712ea8c161c..a854cc4fcb44 100644 --- 
a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -334,21 +334,21 @@ class LinkedService(Model): AzureFunctionLinkedService, AzureDataExplorerLinkedService, SapTableLinkedService, GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, DynamicsAXLinkedService, - ResponsysLinkedService, AzureDatabricksLinkedService, - AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, - SalesforceMarketingCloudLinkedService, NetezzaLinkedService, - VerticaLinkedService, ZohoLinkedService, XeroLinkedService, - SquareLinkedService, SparkLinkedService, ShopifyLinkedService, - ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, - PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, - AzureMariaDBLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, + ResponsysLinkedService, AzureDatabricksDeltaLakeLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, @@ -403,7 +403,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 
'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 
'ResponsysLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 
'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): @@ -515,7 +515,8 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SharePointOnlineListResourceDataset, SnowflakeDataset, + sub-classes are: AzureDatabricksDeltaLakeDataset, + SharePointOnlineListResourceDataset, SnowflakeDataset, GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, @@ -597,7 +598,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 
'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Xml': 'XmlDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Excel': 'ExcelDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 
'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Xml': 'XmlDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Excel': 'ExcelDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, **kwargs): @@ -677,7 +678,8 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SharePointOnlineListSource, SnowflakeSource, HttpSource, + sub-classes are: SharePointOnlineListSource, + AzureDatabricksDeltaLakeSource, SnowflakeSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, WebSource, OracleSource, AzureDataExplorerSource, HdfsSource, FileSystemSource, @@ -721,7 +723,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'XmlSource': 'XmlSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'ExcelSource': 'ExcelSource', 'AvroSource': 
'AvroSource'} + 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'XmlSource': 'XmlSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'ExcelSource': 'ExcelSource', 'AvroSource': 'AvroSource'} } def __init__(self, **kwargs): @@ -1708,8 +1710,8 @@ class CopySink(Model): SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, RestSink, - OrcSink, JsonSink, DelimitedTextSink + SapCloudForCustomerSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, + AzurePostgreSqlSink, RestSink, OrcSink, JsonSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. 
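The AzureDatabricksDeltaLakeSource registered above plugs into the existing CopyActivity source/sink hierarchy. A minimal sketch, assuming hypothetical dataset names and a plain BlobSink on the write side (neither comes from this diff):

from azure.mgmt.datafactory.models import (
    CopyActivity, DatasetReference, AzureDatabricksDeltaLakeSource, BlobSink)

# Copy activity reading from Delta Lake via the new polymorphic source type.
copy_from_delta = CopyActivity(
    name='CopyFromDeltaLake',
    inputs=[DatasetReference(reference_name='DeltaLakeEvents')],      # hypothetical dataset
    outputs=[DatasetReference(reference_name='BlobStagingDataset')],  # hypothetical dataset
    source=AzureDatabricksDeltaLakeSource(query='SELECT * FROM events'),
    sink=BlobSink(),
)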
@@ -1753,7 +1755,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): @@ -1880,7 +1882,7 @@ class FormatWriteSettings(Model): You probably want to use the sub-classes and not this class directly. Known sub-classes are: JsonWriteSettings, DelimitedTextWriteSettings, - AvroWriteSettings + OrcWriteSettings, AvroWriteSettings, ParquetWriteSettings All required parameters must be populated in order to send to Azure. 
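OrcWriteSettings and ParquetWriteSettings now deserialize through the FormatWriteSettings discriminator alongside the existing Avro and delimited-text settings. A hedged sketch using AvroSink with the two AvroWriteSettings fields added in the next hunk; the row limit and prefix values are illustrative:

from azure.mgmt.datafactory.models import AvroSink, AvroWriteSettings

# Sink-side format settings; store settings omitted for brevity.
avro_sink = AvroSink(
    format_settings=AvroWriteSettings(
        max_rows_per_file=1000000,       # roll over to a new file after ~1M rows
        file_name_prefix='events_part',  # applied when copying from a non-file source
    ),
)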
@@ -1901,7 +1903,7 @@ class FormatWriteSettings(Model): } _subtype_map = { - 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings'} + 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__(self, **kwargs): @@ -1925,6 +1927,15 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -1936,12 +1947,16 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__(self, **kwargs): super(AvroWriteSettings, self).__init__(**kwargs) self.record_name = kwargs.get('record_name', None) self.record_namespace = kwargs.get('record_namespace', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) self.type = 'AvroWriteSettings' @@ -2896,6 +2911,391 @@ def __init__(self, **kwargs): self.type = 'AzureBlobStorageWriteSettings' +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The name of delta table. Type: string (or Expression with + resultType string). + :type table: object + :param database: The database name of delta table. 
Type: string (or + Expression with resultType string). + :type database: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeDataset, self).__init__(**kwargs) + self.table = kwargs.get('table', None) + self.database = kwargs.get('database', None) + self.type = 'AzureDatabricksDeltaLakeDataset' + + +class ExportSettings(Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeExportCopyCommand, + AzureDatabricksDeltaLakeExportCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand', 'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand'} + } + + def __init__(self, **kwargs): + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(**kwargs) + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + self.type = 'AzureDatabricksDeltaLakeExportCommand' + + +class ImportSettings(Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, + SnowflakeImportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__(self, **kwargs): + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks + Delta Lake Copy. Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(**kwargs) + self.date_format = kwargs.get('date_format', None) + self.timestamp_format = kwargs.get('timestamp_format', None) + self.type = 'AzureDatabricksDeltaLakeImportCommand' + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. 
+ :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param domain: Required. .azuredatabricks.net, domain name of your + Databricks deployment. Type: string (or Expression with resultType + string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be + used for all runs of this job. Type: string (or Expression with resultType + string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(**kwargs) + self.domain = kwargs.get('domain', None) + self.access_token = kwargs.get('access_token', None) + self.cluster_id = kwargs.get('cluster_id', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureDatabricksDeltaLake' + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). 
+ :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. + :type import_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeSink, self).__init__(**kwargs) + self.pre_copy_script = kwargs.get('pre_copy_script', None) + self.import_settings = kwargs.get('import_settings', None) + self.type = 'AzureDatabricksDeltaLakeSink' + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Azure Databricks Delta Lake Sql query. Type: string (or + Expression with resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__(self, **kwargs): + super(AzureDatabricksDeltaLakeSource, self).__init__(**kwargs) + self.query = kwargs.get('query', None) + self.export_settings = kwargs.get('export_settings', None) + self.type = 'AzureDatabricksDeltaLakeSource' + + class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. @@ -7998,7 +8398,8 @@ class CompressionReadSettings(Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings + sub-classes are: TarGZipReadSettings, TarReadSettings, + ZipDeflateReadSettings All required parameters must be populated in order to send to Azure. @@ -8019,7 +8420,7 @@ class CompressionReadSettings(Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__(self, **kwargs): @@ -8385,6 +8786,40 @@ def __init__(self, **kwargs): self.type = 'Copy' +class CopyTranslator(Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__(self, **kwargs): + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = None + + class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. @@ -10220,7 +10655,8 @@ class DatasetCompression(Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + sub-classes are: DatasetTarGZipCompression, DatasetTarCompression, + DatasetZipDeflateCompression, DatasetDeflateCompression, DatasetGZipCompression, DatasetBZip2Compression All required parameters must be populated in order to send to Azure. 
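Taken together, the Delta Lake classes above cover the linked service, dataset and copy sink. A minimal end-to-end sketch; the workspace domain, access token, cluster id and resource names are placeholders, and the commented create_or_update calls assume the flattened 0.x management-client signatures:

from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeLinkedService, AzureDatabricksDeltaLakeDataset,
    AzureDatabricksDeltaLakeSink, AzureDatabricksDeltaLakeImportCommand,
    LinkedServiceReference, SecureString)

delta_ls = AzureDatabricksDeltaLakeLinkedService(
    domain='https://adb-1234567890123456.7.azuredatabricks.net',  # placeholder workspace URL
    access_token=SecureString(value='<databricks-pat>'),          # placeholder token
    cluster_id='0123-456789-abcde000',                            # placeholder interactive cluster
)
delta_ds = AzureDatabricksDeltaLakeDataset(
    linked_service_name=LinkedServiceReference(reference_name='DeltaLakeLS'),
    database='default',
    table='events',
)
delta_sink = AzureDatabricksDeltaLakeSink(
    pre_copy_script='TRUNCATE TABLE default.events',
    import_settings=AzureDatabricksDeltaLakeImportCommand(
        date_format='yyyy-MM-dd',
        timestamp_format='yyyy-MM-dd HH:mm:ss',
    ),
)
# adf_client.linked_services.create_or_update(rg, factory_name, 'DeltaLakeLS', delta_ls)
# adf_client.datasets.create_or_update(rg, factory_name, 'DeltaLakeEvents', delta_ds)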
@@ -10242,7 +10678,7 @@ class DatasetCompression(Model): } _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + 'type': {'TarGZip': 'DatasetTarGZipCompression', 'Tar': 'DatasetTarCompression', 'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} } def __init__(self, **kwargs): @@ -10277,6 +10713,28 @@ def __init__(self, **kwargs): self.type = 'BZip2' +class DatasetDataElement(Model): + """Columns that define the structure of the dataset. + + :param name: Name of the column. Type: string (or Expression with + resultType string). + :type name: object + :param type: Type of the column. Type: string (or Expression with + resultType string). + :type type: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetDataElement, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + class DatasetDebugResource(SubResourceDebugResource): """Dataset debug resource. @@ -10397,63 +10855,146 @@ class DatasetReference(Model): """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, **kwargs): + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = kwargs.get('reference_name', None) + self.parameters = kwargs.get('parameters', None) + + +class DatasetResource(SubResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Dataset properties. + :type properties: ~azure.mgmt.datafactory.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__(self, **kwargs): + super(DatasetResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class DatasetSchemaDataElement(Model): + """Columns that define the physical type schema of the dataset. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Name of the schema column. Type: string (or Expression with + resultType string). + :type name: object + :param type: Type of the schema column. 
Type: string (or Expression with + resultType string). + :type type: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(DatasetSchemaDataElement, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.name = kwargs.get('name', None) + self.type = kwargs.get('type', None) + + +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "DatasetReference" - def __init__(self, **kwargs): - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = kwargs.get('reference_name', None) - self.parameters = kwargs.get('parameters', None) - + super(DatasetTarCompression, self).__init__(**kwargs) + self.type = 'Tar' -class DatasetResource(SubResource): - """Dataset resource type. - Variables are only populated by the server, and will be ignored when - sending a request. +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param level: The TarGZip compression level. + :type level: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + 'level': {'key': 'level', 'type': 'object'}, } def __init__(self, **kwargs): - super(DatasetResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) + super(DatasetTarGZipCompression, self).__init__(**kwargs) + self.level = kwargs.get('level', None) + self.type = 'TarGZip' class DatasetZipDeflateCompression(DatasetCompression): @@ -11081,6 +11622,15 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). 
:type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -11093,12 +11643,16 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } def __init__(self, **kwargs): super(DelimitedTextWriteSettings, self).__init__(**kwargs) self.quote_all_text = kwargs.get('quote_all_text', None) self.file_extension = kwargs.get('file_extension', None) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) self.type = 'DelimitedTextWriteSettings' @@ -13047,40 +13601,6 @@ def __init__(self, **kwargs): self.type = 'ExecuteSSISPackage' -class ExportSettings(Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__(self, **kwargs): - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - - class ExposureControlBatchRequest(Model): """A list of exposure control features. @@ -17577,40 +18097,6 @@ def __init__(self, **kwargs): self.type = 'ImpalaSource' -class ImportSettings(Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__(self, **kwargs): - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = None - - class InformixLinkedService(LinkedService): """Informix linked service. @@ -22997,7 +23483,7 @@ class OrcDataset(Dataset): :param location: Required. 
The location of the ORC data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param orc_compression_codec: Possible values include: 'none', 'zlib', - 'snappy' + 'snappy', 'lzo' :type orc_compression_codec: str or ~azure.mgmt.datafactory.models.OrcCompressionCodec """ @@ -23093,6 +23579,8 @@ class OrcSink(CopySink): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: ORC format settings. + :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -23108,11 +23596,13 @@ class OrcSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } def __init__(self, **kwargs): super(OrcSink, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'OrcSink' @@ -23167,6 +23657,45 @@ def __init__(self, **kwargs): self.type = 'OrcSource' +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OrcWriteSettings, self).__init__(**kwargs) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + self.type = 'OrcWriteSettings' + + class PackageStore(Model): """Package store for the SSIS integration runtime. @@ -23351,6 +23880,8 @@ class ParquetSink(CopySink): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Parquet format settings. 
+ :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -23366,11 +23897,13 @@ class ParquetSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } def __init__(self, **kwargs): super(ParquetSink, self).__init__(**kwargs) self.store_settings = kwargs.get('store_settings', None) + self.format_settings = kwargs.get('format_settings', None) self.type = 'ParquetSink' @@ -23425,6 +23958,45 @@ def __init__(self, **kwargs): self.type = 'ParquetSource' +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(ParquetWriteSettings, self).__init__(**kwargs) + self.max_rows_per_file = kwargs.get('max_rows_per_file', None) + self.file_name_prefix = kwargs.get('file_name_prefix', None) + self.type = 'ParquetWriteSettings' + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -25546,14 +26118,10 @@ class RestSink(CopySink): :param request_interval: The time to await before sending next request, in milliseconds :type request_interval: object - :param compression_type: Compression Type to Send data in compressed - format with Optimal Compression Level, Default is None. And The Only - Supported option is Gzip. - :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an - Object before calling the rest endpoint , Default is false. ex: if true - request content sample format is { rows:[]} else the format is [] - :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to Send data in + compressed format with Optimal Compression Level, Default is None. And The + Only Supported option is Gzip. 
+ :type http_compression_type: object """ _validation = { @@ -25572,8 +26140,7 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } def __init__(self, **kwargs): @@ -25582,8 +26149,7 @@ def __init__(self, **kwargs): self.additional_headers = kwargs.get('additional_headers', None) self.http_request_timeout = kwargs.get('http_request_timeout', None) self.request_interval = kwargs.get('request_interval', None) - self.compression_type = kwargs.get('compression_type', None) - self.wrap_request_json_in_an_object = kwargs.get('wrap_request_json_in_an_object', None) + self.http_compression_type = kwargs.get('http_compression_type', None) self.type = 'RestSink' @@ -32041,6 +32607,142 @@ def __init__(self, **kwargs): self.type = 'SybaseTable' +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, + Group: MyGroup, Name: MyName" Type: string (or Expression with resultType + string). This property will be retired. Please use mappings property. + :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and + hierarchical data. Example: {"Column1": "$.Column1", "Column2": + "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or + Expression with resultType object). This property will be retired. Please + use mappings property. + :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is + going to do cross-apply. Type: object (or Expression with resultType + object). + :type collection_reference: object + :param map_complex_values_to_string: Whether to map complex (array and + object) values to simple strings in json format. Type: boolean (or + Expression with resultType boolean). + :type map_complex_values_to_string: object + :param mappings: Column mappings with logical types. Tabular->tabular + example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :type mappings: object + :param type_conversion: Whether to enable the advanced type conversion + feature in the Copy activity. Type: boolean (or Expression with resultType + boolean). 
+ :type type_conversion: object + :param type_conversion_settings: Type conversion settings + :type type_conversion_settings: + ~azure.mgmt.datafactory.models.TypeConversionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, + 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, + 'mappings': {'key': 'mappings', 'type': 'object'}, + 'type_conversion': {'key': 'typeConversion', 'type': 'object'}, + 'type_conversion_settings': {'key': 'typeConversionSettings', 'type': 'TypeConversionSettings'}, + } + + def __init__(self, **kwargs): + super(TabularTranslator, self).__init__(**kwargs) + self.column_mappings = kwargs.get('column_mappings', None) + self.schema_mapping = kwargs.get('schema_mapping', None) + self.collection_reference = kwargs.get('collection_reference', None) + self.map_complex_values_to_string = kwargs.get('map_complex_values_to_string', None) + self.mappings = kwargs.get('mappings', None) + self.type_conversion = kwargs.get('type_conversion', None) + self.type_conversion_settings = kwargs.get('type_conversion_settings', None) + self.type = 'TabularTranslator' + + +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TarGZipReadSettings, self).__init__(**kwargs) + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + self.type = 'TarGZipReadSettings' + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). 
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TarReadSettings, self).__init__(**kwargs) + self.preserve_compression_file_name_as_folder = kwargs.get('preserve_compression_file_name_as_folder', None) + self.type = 'TarReadSettings' + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -32811,6 +33513,49 @@ def __init__(self, **kwargs): self.type = 'TumblingWindowTriggerDependencyReference' +class TypeConversionSettings(Model): + """Type conversion settings. + + :param allow_data_truncation: Whether to allow data truncation when + converting the data. Type: boolean (or Expression with resultType + boolean). + :type allow_data_truncation: object + :param treat_boolean_as_number: Whether to treat boolean values as + numbers. Type: boolean (or Expression with resultType boolean). + :type treat_boolean_as_number: object + :param date_time_format: The format for DateTime values. Type: string (or + Expression with resultType string). + :type date_time_format: object + :param date_time_offset_format: The format for DateTimeOffset values. + Type: string (or Expression with resultType string). + :type date_time_offset_format: object + :param time_span_format: The format for TimeSpan values. Type: string (or + Expression with resultType string). + :type time_span_format: object + :param culture: The culture used to convert data from/to string. Type: + string (or Expression with resultType string). + :type culture: object + """ + + _attribute_map = { + 'allow_data_truncation': {'key': 'allowDataTruncation', 'type': 'object'}, + 'treat_boolean_as_number': {'key': 'treatBooleanAsNumber', 'type': 'object'}, + 'date_time_format': {'key': 'dateTimeFormat', 'type': 'object'}, + 'date_time_offset_format': {'key': 'dateTimeOffsetFormat', 'type': 'object'}, + 'time_span_format': {'key': 'timeSpanFormat', 'type': 'object'}, + 'culture': {'key': 'culture', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(TypeConversionSettings, self).__init__(**kwargs) + self.allow_data_truncation = kwargs.get('allow_data_truncation', None) + self.treat_boolean_as_number = kwargs.get('treat_boolean_as_number', None) + self.date_time_format = kwargs.get('date_time_format', None) + self.date_time_offset_format = kwargs.get('date_time_offset_format', None) + self.time_span_format = kwargs.get('time_span_format', None) + self.culture = kwargs.get('culture', None) + + class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. 
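
Before the companion changes to _models_py3.py below, it may help to see how the new translator and type-conversion models compose. A minimal sketch, assuming these models are available in the installed azure-mgmt-datafactory package; the column names and culture value are invented for illustration:

from azure.mgmt.datafactory.models import TabularTranslator, TypeConversionSettings

# Map a source column to a sink column with explicit logical types, and opt in
# to the advanced type-conversion behaviour described in the docstrings above.
translator = TabularTranslator(
    mappings=[
        {"source": {"name": "CustomerName", "type": "String"},
         "sink": {"name": "ClientName", "type": "String"}},
    ],
    type_conversion=True,
    type_conversion_settings=TypeConversionSettings(
        allow_data_truncation=False,
        treat_boolean_as_number=False,
        culture="en-us",                          # assumed culture identifier
        date_time_format="yyyy-MM-dd HH:mm:ss",   # assumed format string
    ),
)

Per the CopyTranslator hunk above, an object like this is intended to be supplied wherever a copy activity accepts a translator.
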
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 8723c634a3e4..7153b09f4fab 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -334,21 +334,21 @@ class LinkedService(Model): AzureFunctionLinkedService, AzureDataExplorerLinkedService, SapTableLinkedService, GoogleAdWordsLinkedService, OracleServiceCloudLinkedService, DynamicsAXLinkedService, - ResponsysLinkedService, AzureDatabricksLinkedService, - AzureDataLakeAnalyticsLinkedService, HDInsightOnDemandLinkedService, - SalesforceMarketingCloudLinkedService, NetezzaLinkedService, - VerticaLinkedService, ZohoLinkedService, XeroLinkedService, - SquareLinkedService, SparkLinkedService, ShopifyLinkedService, - ServiceNowLinkedService, QuickBooksLinkedService, PrestoLinkedService, - PhoenixLinkedService, PaypalLinkedService, MarketoLinkedService, - AzureMariaDBLinkedService, MariaDBLinkedService, MagentoLinkedService, - JiraLinkedService, ImpalaLinkedService, HubspotLinkedService, - HiveLinkedService, HBaseLinkedService, GreenplumLinkedService, - GoogleBigQueryLinkedService, EloquaLinkedService, DrillLinkedService, - CouchbaseLinkedService, ConcurLinkedService, AzurePostgreSqlLinkedService, - AmazonMWSLinkedService, SapHanaLinkedService, SapBWLinkedService, - SftpServerLinkedService, FtpServerLinkedService, HttpLinkedService, - AzureSearchLinkedService, CustomDataSourceLinkedService, + ResponsysLinkedService, AzureDatabricksDeltaLakeLinkedService, + AzureDatabricksLinkedService, AzureDataLakeAnalyticsLinkedService, + HDInsightOnDemandLinkedService, SalesforceMarketingCloudLinkedService, + NetezzaLinkedService, VerticaLinkedService, ZohoLinkedService, + XeroLinkedService, SquareLinkedService, SparkLinkedService, + ShopifyLinkedService, ServiceNowLinkedService, QuickBooksLinkedService, + PrestoLinkedService, PhoenixLinkedService, PaypalLinkedService, + MarketoLinkedService, AzureMariaDBLinkedService, MariaDBLinkedService, + MagentoLinkedService, JiraLinkedService, ImpalaLinkedService, + HubspotLinkedService, HiveLinkedService, HBaseLinkedService, + GreenplumLinkedService, GoogleBigQueryLinkedService, EloquaLinkedService, + DrillLinkedService, CouchbaseLinkedService, ConcurLinkedService, + AzurePostgreSqlLinkedService, AmazonMWSLinkedService, SapHanaLinkedService, + SapBWLinkedService, SftpServerLinkedService, FtpServerLinkedService, + HttpLinkedService, AzureSearchLinkedService, CustomDataSourceLinkedService, AmazonRedshiftLinkedService, AmazonS3LinkedService, RestServiceLinkedService, SapOpenHubLinkedService, SapEccLinkedService, SapCloudForCustomerLinkedService, SalesforceServiceCloudLinkedService, @@ -403,7 +403,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 
'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 
'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 
'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: @@ -515,7 +515,8 @@ class Dataset(Model): data stores, such as tables, files, folders, and documents. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SharePointOnlineListResourceDataset, SnowflakeDataset, + sub-classes are: AzureDatabricksDeltaLakeDataset, + SharePointOnlineListResourceDataset, SnowflakeDataset, GoogleAdWordsObjectDataset, AzureDataExplorerTableDataset, OracleServiceCloudObjectDataset, DynamicsAXResourceDataset, ResponsysObjectDataset, SalesforceMarketingCloudObjectDataset, @@ -597,7 +598,7 @@ class Dataset(Model): } _subtype_map = { - 'type': {'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 
'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Xml': 'XmlDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Excel': 'ExcelDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} + 'type': {'AzureDatabricksDeltaLakeDataset': 'AzureDatabricksDeltaLakeDataset', 'SharePointOnlineListResource': 'SharePointOnlineListResourceDataset', 'SnowflakeTable': 'SnowflakeDataset', 'GoogleAdWordsObject': 'GoogleAdWordsObjectDataset', 'AzureDataExplorerTable': 'AzureDataExplorerTableDataset', 'OracleServiceCloudObject': 'OracleServiceCloudObjectDataset', 'DynamicsAXResource': 'DynamicsAXResourceDataset', 'ResponsysObject': 'ResponsysObjectDataset', 'SalesforceMarketingCloudObject': 'SalesforceMarketingCloudObjectDataset', 'VerticaTable': 'VerticaTableDataset', 'NetezzaTable': 'NetezzaTableDataset', 'ZohoObject': 'ZohoObjectDataset', 'XeroObject': 'XeroObjectDataset', 'SquareObject': 'SquareObjectDataset', 'SparkObject': 'SparkObjectDataset', 'ShopifyObject': 'ShopifyObjectDataset', 'ServiceNowObject': 'ServiceNowObjectDataset', 'QuickBooksObject': 'QuickBooksObjectDataset', 'PrestoObject': 'PrestoObjectDataset', 'PhoenixObject': 'PhoenixObjectDataset', 'PaypalObject': 'PaypalObjectDataset', 'MarketoObject': 'MarketoObjectDataset', 'AzureMariaDBTable': 'AzureMariaDBTableDataset', 'MariaDBTable': 'MariaDBTableDataset', 'MagentoObject': 'MagentoObjectDataset', 'JiraObject': 'JiraObjectDataset', 'ImpalaObject': 'ImpalaObjectDataset', 'HubspotObject': 'HubspotObjectDataset', 'HiveObject': 'HiveObjectDataset', 'HBaseObject': 'HBaseObjectDataset', 'GreenplumTable': 'GreenplumTableDataset', 'GoogleBigQueryObject': 'GoogleBigQueryObjectDataset', 'EloquaObject': 'EloquaObjectDataset', 'DrillTable': 'DrillTableDataset', 'CouchbaseTable': 'CouchbaseTableDataset', 'ConcurObject': 'ConcurObjectDataset', 'AzurePostgreSqlTable': 'AzurePostgreSqlTableDataset', 'AmazonMWSObject': 'AmazonMWSObjectDataset', 'HttpFile': 'HttpDataset', 'AzureSearchIndex': 'AzureSearchIndexDataset', 'WebTable': 'WebTableDataset', 'SapTableResource': 'SapTableResourceDataset', 'RestResource': 'RestResourceDataset', 'SqlServerTable': 'SqlServerTableDataset', 'SapOpenHubTable': 'SapOpenHubTableDataset', 'SapHanaTable': 'SapHanaTableDataset', 'SapEccResource': 'SapEccResourceDataset', 
'SapCloudForCustomerResource': 'SapCloudForCustomerResourceDataset', 'SapBwCube': 'SapBwCubeDataset', 'SybaseTable': 'SybaseTableDataset', 'SalesforceServiceCloudObject': 'SalesforceServiceCloudObjectDataset', 'SalesforceObject': 'SalesforceObjectDataset', 'MicrosoftAccessTable': 'MicrosoftAccessTableDataset', 'PostgreSqlTable': 'PostgreSqlTableDataset', 'MySqlTable': 'MySqlTableDataset', 'OdbcTable': 'OdbcTableDataset', 'InformixTable': 'InformixTableDataset', 'RelationalTable': 'RelationalTableDataset', 'Db2Table': 'Db2TableDataset', 'AmazonRedshiftTable': 'AmazonRedshiftTableDataset', 'AzureMySqlTable': 'AzureMySqlTableDataset', 'TeradataTable': 'TeradataTableDataset', 'OracleTable': 'OracleTableDataset', 'ODataResource': 'ODataResourceDataset', 'CosmosDbMongoDbApiCollection': 'CosmosDbMongoDbApiCollectionDataset', 'MongoDbV2Collection': 'MongoDbV2CollectionDataset', 'MongoDbCollection': 'MongoDbCollectionDataset', 'FileShare': 'FileShareDataset', 'Office365Table': 'Office365Dataset', 'AzureBlobFSFile': 'AzureBlobFSDataset', 'AzureDataLakeStoreFile': 'AzureDataLakeStoreDataset', 'CommonDataServiceForAppsEntity': 'CommonDataServiceForAppsEntityDataset', 'DynamicsCrmEntity': 'DynamicsCrmEntityDataset', 'DynamicsEntity': 'DynamicsEntityDataset', 'DocumentDbCollection': 'DocumentDbCollectionDataset', 'CosmosDbSqlApiCollection': 'CosmosDbSqlApiCollectionDataset', 'CustomDataset': 'CustomDataset', 'CassandraTable': 'CassandraTableDataset', 'AzureSqlDWTable': 'AzureSqlDWTableDataset', 'AzureSqlMITable': 'AzureSqlMITableDataset', 'AzureSqlTable': 'AzureSqlTableDataset', 'AzureTable': 'AzureTableDataset', 'AzureBlob': 'AzureBlobDataset', 'Binary': 'BinaryDataset', 'Orc': 'OrcDataset', 'Xml': 'XmlDataset', 'Json': 'JsonDataset', 'DelimitedText': 'DelimitedTextDataset', 'Parquet': 'ParquetDataset', 'Excel': 'ExcelDataset', 'Avro': 'AvroDataset', 'AmazonS3Object': 'AmazonS3Dataset'} } def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, **kwargs) -> None: @@ -677,7 +678,8 @@ class CopySource(Model): """A copy activity source. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: SharePointOnlineListSource, SnowflakeSource, HttpSource, + sub-classes are: SharePointOnlineListSource, + AzureDatabricksDeltaLakeSource, SnowflakeSource, HttpSource, AzureBlobFSSource, AzureDataLakeStoreSource, Office365Source, CosmosDbMongoDbApiSource, MongoDbV2Source, MongoDbSource, WebSource, OracleSource, AzureDataExplorerSource, HdfsSource, FileSystemSource, @@ -721,7 +723,7 @@ class CopySource(Model): } _subtype_map = { - 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'XmlSource': 'XmlSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'ExcelSource': 'ExcelSource', 'AvroSource': 'AvroSource'} + 'type': {'SharePointOnlineListSource': 'SharePointOnlineListSource', 'AzureDatabricksDeltaLakeSource': 'AzureDatabricksDeltaLakeSource', 'SnowflakeSource': 'SnowflakeSource', 'HttpSource': 'HttpSource', 'AzureBlobFSSource': 'AzureBlobFSSource', 'AzureDataLakeStoreSource': 'AzureDataLakeStoreSource', 'Office365Source': 'Office365Source', 'CosmosDbMongoDbApiSource': 'CosmosDbMongoDbApiSource', 'MongoDbV2Source': 'MongoDbV2Source', 'MongoDbSource': 'MongoDbSource', 'WebSource': 'WebSource', 'OracleSource': 'OracleSource', 'AzureDataExplorerSource': 'AzureDataExplorerSource', 'HdfsSource': 'HdfsSource', 'FileSystemSource': 'FileSystemSource', 'RestSource': 'RestSource', 'SalesforceServiceCloudSource': 'SalesforceServiceCloudSource', 'ODataSource': 'ODataSource', 'MicrosoftAccessSource': 'MicrosoftAccessSource', 'RelationalSource': 'RelationalSource', 'CommonDataServiceForAppsSource': 'CommonDataServiceForAppsSource', 'DynamicsCrmSource': 'DynamicsCrmSource', 'DynamicsSource': 'DynamicsSource', 'CosmosDbSqlApiSource': 'CosmosDbSqlApiSource', 'DocumentDbCollectionSource': 'DocumentDbCollectionSource', 'BlobSource': 'BlobSource', 'TabularSource': 'TabularSource', 'BinarySource': 'BinarySource', 'OrcSource': 'OrcSource', 'XmlSource': 'XmlSource', 'JsonSource': 'JsonSource', 'DelimitedTextSource': 'DelimitedTextSource', 'ParquetSource': 'ParquetSource', 'ExcelSource': 'ExcelSource', 'AvroSource': 'AvroSource'} } def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -1708,8 +1710,8 @@ class CopySink(Model): SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, 
DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, - SapCloudForCustomerSink, AzureMySqlSink, AzurePostgreSqlSink, RestSink, - OrcSink, JsonSink, DelimitedTextSink + SapCloudForCustomerSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, + AzurePostgreSqlSink, RestSink, OrcSink, JsonSink, DelimitedTextSink All required parameters must be populated in order to send to Azure. @@ -1753,7 +1755,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -1880,7 +1882,7 @@ class FormatWriteSettings(Model): You probably want to use the 
sub-classes and not this class directly. Known sub-classes are: JsonWriteSettings, DelimitedTextWriteSettings, - AvroWriteSettings + OrcWriteSettings, AvroWriteSettings, ParquetWriteSettings All required parameters must be populated in order to send to Azure. @@ -1901,7 +1903,7 @@ class FormatWriteSettings(Model): } _subtype_map = { - 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings'} + 'type': {'JsonWriteSettings': 'JsonWriteSettings', 'DelimitedTextWriteSettings': 'DelimitedTextWriteSettings', 'OrcWriteSettings': 'OrcWriteSettings', 'AvroWriteSettings': 'AvroWriteSettings', 'ParquetWriteSettings': 'ParquetWriteSettings'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -1925,6 +1927,15 @@ class AvroWriteSettings(FormatWriteSettings): :type record_name: str :param record_namespace: Record namespace in the write result. :type record_namespace: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -1936,12 +1947,16 @@ class AvroWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'record_name': {'key': 'recordName', 'type': 'str'}, 'record_namespace': {'key': 'recordNamespace', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, record_name: str=None, record_namespace: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, record_name: str=None, record_namespace: str=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: super(AvroWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.record_name = record_name self.record_namespace = record_namespace + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix self.type = 'AvroWriteSettings' @@ -2896,6 +2911,391 @@ def __init__(self, *, additional_properties=None, max_concurrent_connections=Non self.type = 'AzureBlobStorageWriteSettings' +class AzureDatabricksDeltaLakeDataset(Dataset): + """Azure Databricks Delta Lake dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param description: Dataset description. + :type description: str + :param structure: Columns that define the structure of the dataset. Type: + array (or Expression with resultType array), itemType: DatasetDataElement. + :type structure: object + :param schema: Columns that define the physical type schema of the + dataset. Type: array (or Expression with resultType array), itemType: + DatasetSchemaDataElement. + :type schema: object + :param linked_service_name: Required. Linked service reference. + :type linked_service_name: + ~azure.mgmt.datafactory.models.LinkedServiceReference + :param parameters: Parameters for dataset. 
+ :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + Dataset. + :type annotations: list[object] + :param folder: The folder that this Dataset is in. If not specified, + Dataset will appear at the root level. + :type folder: ~azure.mgmt.datafactory.models.DatasetFolder + :param type: Required. Constant filled by server. + :type type: str + :param table: The name of delta table. Type: string (or Expression with + resultType string). + :type table: object + :param database: The database name of delta table. Type: string (or + Expression with resultType string). + :type database: object + """ + + _validation = { + 'linked_service_name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'description': {'key': 'description', 'type': 'str'}, + 'structure': {'key': 'structure', 'type': 'object'}, + 'schema': {'key': 'schema', 'type': 'object'}, + 'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, + 'type': {'key': 'type', 'type': 'str'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table=None, database=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) + self.table = table + self.database = database + self.type = 'AzureDatabricksDeltaLakeDataset' + + +class ExportSettings(Model): + """Export command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SnowflakeExportCopyCommand, + AzureDatabricksDeltaLakeExportCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand', 'AzureDatabricksDeltaLakeExportCommand': 'AzureDatabricksDeltaLakeExportCommand'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ExportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + +class AzureDatabricksDeltaLakeExportCommand(ExportSettings): + """Azure Databricks Delta Lake export command settings. + + All required parameters must be populated in order to send to Azure. 
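
For context on the dataset model introduced above, a brief sketch of how it might be instantiated; the linked service name, database, and table below are placeholders, and LinkedServiceReference is assumed to be importable from the same models package:

from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeDataset,
    LinkedServiceReference,
)

# Hypothetical dataset pointing at a Delta table; the reference name must
# match an existing Azure Databricks Delta Lake linked service in the factory.
dataset = AzureDatabricksDeltaLakeDataset(
    linked_service_name=LinkedServiceReference(reference_name="DeltaLakeLinkedService"),
    database="analytics",   # placeholder database name
    table="daily_sales",    # placeholder table name
)
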
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param date_format: Specify the date format for the csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for the csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). + :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, date_format=None, timestamp_format=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeExportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.date_format = date_format + self.timestamp_format = timestamp_format + self.type = 'AzureDatabricksDeltaLakeExportCommand' + + +class ImportSettings(Model): + """Import command settings. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: AzureDatabricksDeltaLakeImportCommand, + SnowflakeImportCopyCommand + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'AzureDatabricksDeltaLakeImportCommand': 'AzureDatabricksDeltaLakeImportCommand', 'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(ImportSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + +class AzureDatabricksDeltaLakeImportCommand(ImportSettings): + """Azure Databricks Delta Lake import command settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param date_format: Specify the date format for csv in Azure Databricks + Delta Lake Copy. Type: string (or Expression with resultType string). + :type date_format: object + :param timestamp_format: Specify the timestamp format for csv in Azure + Databricks Delta Lake Copy. Type: string (or Expression with resultType + string). 
+ :type timestamp_format: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'date_format': {'key': 'dateFormat', 'type': 'object'}, + 'timestamp_format': {'key': 'timestampFormat', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, date_format=None, timestamp_format=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeImportCommand, self).__init__(additional_properties=additional_properties, **kwargs) + self.date_format = date_format + self.timestamp_format = timestamp_format + self.type = 'AzureDatabricksDeltaLakeImportCommand' + + +class AzureDatabricksDeltaLakeLinkedService(LinkedService): + """Azure Databricks Delta Lake linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param domain: Required. .azuredatabricks.net, domain name of your + Databricks deployment. Type: string (or Expression with resultType + string). + :type domain: object + :param access_token: Access token for databricks REST API. Refer to + https://docs.azuredatabricks.net/api/latest/authentication.html. Type: + string, SecureString or AzureKeyVaultSecretReference. + :type access_token: ~azure.mgmt.datafactory.models.SecretBase + :param cluster_id: The id of an existing interactive cluster that will be + used for all runs of this job. Type: string (or Expression with resultType + string). + :type cluster_id: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'domain': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'domain': {'key': 'typeProperties.domain', 'type': 'object'}, + 'access_token': {'key': 'typeProperties.accessToken', 'type': 'SecretBase'}, + 'cluster_id': {'key': 'typeProperties.clusterId', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, domain, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_token=None, cluster_id=None, encrypted_credential=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.domain = domain + self.access_token = access_token + self.cluster_id = cluster_id + self.encrypted_credential = encrypted_credential + self.type = 'AzureDatabricksDeltaLake' + + +class AzureDatabricksDeltaLakeSink(CopySink): + """A copy activity Azure Databricks Delta Lake sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param pre_copy_script: SQL pre-copy script. Type: string (or Expression + with resultType string). + :type pre_copy_script: object + :param import_settings: Azure Databricks Delta Lake import settings. 
+ :type import_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeImportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'pre_copy_script': {'key': 'preCopyScript', 'type': 'object'}, + 'import_settings': {'key': 'importSettings', 'type': 'AzureDatabricksDeltaLakeImportCommand'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, pre_copy_script=None, import_settings=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.pre_copy_script = pre_copy_script + self.import_settings = import_settings + self.type = 'AzureDatabricksDeltaLakeSink' + + +class AzureDatabricksDeltaLakeSource(CopySource): + """A copy activity Azure Databricks Delta Lake source. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param source_retry_count: Source retry count. Type: integer (or + Expression with resultType integer). + :type source_retry_count: object + :param source_retry_wait: Source retry wait. Type: string (or Expression + with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type source_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param query: Azure Databricks Delta Lake Sql query. Type: string (or + Expression with resultType string). + :type query: object + :param export_settings: Azure Databricks Delta Lake export settings. 
+ :type export_settings: + ~azure.mgmt.datafactory.models.AzureDatabricksDeltaLakeExportCommand + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'source_retry_count': {'key': 'sourceRetryCount', 'type': 'object'}, + 'source_retry_wait': {'key': 'sourceRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'query': {'key': 'query', 'type': 'object'}, + 'export_settings': {'key': 'exportSettings', 'type': 'AzureDatabricksDeltaLakeExportCommand'}, + } + + def __init__(self, *, additional_properties=None, source_retry_count=None, source_retry_wait=None, max_concurrent_connections=None, query=None, export_settings=None, **kwargs) -> None: + super(AzureDatabricksDeltaLakeSource, self).__init__(additional_properties=additional_properties, source_retry_count=source_retry_count, source_retry_wait=source_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.query = query + self.export_settings = export_settings + self.type = 'AzureDatabricksDeltaLakeSource' + + class AzureDatabricksLinkedService(LinkedService): """Azure Databricks linked service. @@ -7998,7 +8398,8 @@ class CompressionReadSettings(Model): """Compression read settings. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ZipDeflateReadSettings + sub-classes are: TarGZipReadSettings, TarReadSettings, + ZipDeflateReadSettings All required parameters must be populated in order to send to Azure. @@ -8019,7 +8420,7 @@ class CompressionReadSettings(Model): } _subtype_map = { - 'type': {'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} + 'type': {'TarGZipReadSettings': 'TarGZipReadSettings', 'TarReadSettings': 'TarReadSettings', 'ZipDeflateReadSettings': 'ZipDeflateReadSettings'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -8385,6 +8786,40 @@ def __init__(self, *, name: str, source, sink, additional_properties=None, descr self.type = 'Copy' +class CopyTranslator(Model): + """A copy activity translator. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TabularTranslator + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'TabularTranslator': 'TabularTranslator'} + } + + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(CopyTranslator, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + + class CosmosDbLinkedService(LinkedService): """Microsoft Azure Cosmos Database (CosmosDB) linked service. @@ -10220,7 +10655,8 @@ class DatasetCompression(Model): """The compression method used on a dataset. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: DatasetZipDeflateCompression, DatasetDeflateCompression, + sub-classes are: DatasetTarGZipCompression, DatasetTarCompression, + DatasetZipDeflateCompression, DatasetDeflateCompression, DatasetGZipCompression, DatasetBZip2Compression All required parameters must be populated in order to send to Azure. @@ -10242,7 +10678,7 @@ class DatasetCompression(Model): } _subtype_map = { - 'type': {'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} + 'type': {'TarGZip': 'DatasetTarGZipCompression', 'Tar': 'DatasetTarCompression', 'ZipDeflate': 'DatasetZipDeflateCompression', 'Deflate': 'DatasetDeflateCompression', 'GZip': 'DatasetGZipCompression', 'BZip2': 'DatasetBZip2Compression'} } def __init__(self, *, additional_properties=None, **kwargs) -> None: @@ -10277,6 +10713,28 @@ def __init__(self, *, additional_properties=None, **kwargs) -> None: self.type = 'BZip2' +class DatasetDataElement(Model): + """Columns that define the structure of the dataset. + + :param name: Name of the column. Type: string (or Expression with + resultType string). + :type name: object + :param type: Type of the column. Type: string (or Expression with + resultType string). + :type type: object + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__(self, *, name=None, type=None, **kwargs) -> None: + super(DatasetDataElement, self).__init__(**kwargs) + self.name = name + self.type = type + + class DatasetDebugResource(SubResourceDebugResource): """Dataset debug resource. @@ -10397,63 +10855,146 @@ class DatasetReference(Model): """ _validation = { - 'type': {'required': True, 'constant': True}, - 'reference_name': {'required': True}, + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{object}'}, + } + + type = "DatasetReference" + + def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: + super(DatasetReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters + + +class DatasetResource(SubResource): + """Dataset resource type. + + Variables are only populated by the server, and will be ignored when + sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Dataset properties. 
+ :type properties: ~azure.mgmt.datafactory.models.Dataset + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'Dataset'}, + } + + def __init__(self, *, properties, **kwargs) -> None: + super(DatasetResource, self).__init__(**kwargs) + self.properties = properties + + +class DatasetSchemaDataElement(Model): + """Columns that define the physical type schema of the dataset. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param name: Name of the schema column. Type: string (or Expression with + resultType string). + :type name: object + :param type: Type of the schema column. Type: string (or Expression with + resultType string). + :type type: object + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, name=None, type=None, **kwargs) -> None: + super(DatasetSchemaDataElement, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.name = name + self.type = type + + +class DatasetTarCompression(DatasetCompression): + """The Tar archive method used on a dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'reference_name': {'key': 'referenceName', 'type': 'str'}, - 'parameters': {'key': 'parameters', 'type': '{object}'}, } - type = "DatasetReference" - - def __init__(self, *, reference_name: str, parameters=None, **kwargs) -> None: - super(DatasetReference, self).__init__(**kwargs) - self.reference_name = reference_name - self.parameters = parameters - + def __init__(self, *, additional_properties=None, **kwargs) -> None: + super(DatasetTarCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.type = 'Tar' -class DatasetResource(SubResource): - """Dataset resource type. - Variables are only populated by the server, and will be ignored when - sending a request. +class DatasetTarGZipCompression(DatasetCompression): + """The TarGZip compression method used on a dataset. All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :ivar etag: Etag identifies change in the resource. - :vartype etag: str - :param properties: Required. Dataset properties. - :type properties: ~azure.mgmt.datafactory.models.Dataset + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. 
Constant filled by server. + :type type: str + :param level: The TarGZip compression level. + :type level: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'etag': {'readonly': True}, - 'properties': {'required': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'etag': {'key': 'etag', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'Dataset'}, + 'level': {'key': 'level', 'type': 'object'}, } - def __init__(self, *, properties, **kwargs) -> None: - super(DatasetResource, self).__init__(**kwargs) - self.properties = properties + def __init__(self, *, additional_properties=None, level=None, **kwargs) -> None: + super(DatasetTarGZipCompression, self).__init__(additional_properties=additional_properties, **kwargs) + self.level = level + self.type = 'TarGZip' class DatasetZipDeflateCompression(DatasetCompression): @@ -11081,6 +11622,15 @@ class DelimitedTextWriteSettings(FormatWriteSettings): :param file_extension: Required. The file extension used to create the files. Type: string (or Expression with resultType string). :type file_extension: object + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object """ _validation = { @@ -11093,12 +11643,16 @@ class DelimitedTextWriteSettings(FormatWriteSettings): 'type': {'key': 'type', 'type': 'str'}, 'quote_all_text': {'key': 'quoteAllText', 'type': 'object'}, 'file_extension': {'key': 'fileExtension', 'type': 'object'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, } - def __init__(self, *, file_extension, additional_properties=None, quote_all_text=None, **kwargs) -> None: + def __init__(self, *, file_extension, additional_properties=None, quote_all_text=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: super(DelimitedTextWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) self.quote_all_text = quote_all_text self.file_extension = file_extension + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix self.type = 'DelimitedTextWriteSettings' @@ -13047,40 +13601,6 @@ def __init__(self, *, name: str, package_location, connect_via, additional_prope self.type = 'ExecuteSSISPackage' -class ExportSettings(Model): - """Export command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeExportCopyCommand - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeExportCopyCommand': 'SnowflakeExportCopyCommand'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ExportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None - - class ExposureControlBatchRequest(Model): """A list of exposure control features. @@ -17577,40 +18097,6 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'ImpalaSource' -class ImportSettings(Model): - """Import command settings. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SnowflakeImportCopyCommand - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'SnowflakeImportCopyCommand': 'SnowflakeImportCopyCommand'} - } - - def __init__(self, *, additional_properties=None, **kwargs) -> None: - super(ImportSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.type = None - - class InformixLinkedService(LinkedService): """Informix linked service. @@ -22997,7 +23483,7 @@ class OrcDataset(Dataset): :param location: Required. The location of the ORC data storage. :type location: ~azure.mgmt.datafactory.models.DatasetLocation :param orc_compression_codec: Possible values include: 'none', 'zlib', - 'snappy' + 'snappy', 'lzo' :type orc_compression_codec: str or ~azure.mgmt.datafactory.models.OrcCompressionCodec """ @@ -23093,6 +23579,8 @@ class OrcSink(CopySink): :type type: str :param store_settings: ORC store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: ORC format settings. 
+ :type format_settings: ~azure.mgmt.datafactory.models.OrcWriteSettings """ _validation = { @@ -23108,11 +23596,13 @@ class OrcSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'OrcWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: super(OrcSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.format_settings = format_settings self.type = 'OrcSink' @@ -23167,6 +23657,45 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'OrcSource' +class OrcWriteSettings(FormatWriteSettings): + """Orc write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: + super(OrcWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + self.type = 'OrcWriteSettings' + + class PackageStore(Model): """Package store for the SSIS integration runtime. @@ -23351,6 +23880,8 @@ class ParquetSink(CopySink): :type type: str :param store_settings: Parquet store settings. :type store_settings: ~azure.mgmt.datafactory.models.StoreWriteSettings + :param format_settings: Parquet format settings. 
+ :type format_settings: ~azure.mgmt.datafactory.models.ParquetWriteSettings """ _validation = { @@ -23366,11 +23897,13 @@ class ParquetSink(CopySink): 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, 'type': {'key': 'type', 'type': 'str'}, 'store_settings': {'key': 'storeSettings', 'type': 'StoreWriteSettings'}, + 'format_settings': {'key': 'formatSettings', 'type': 'ParquetWriteSettings'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, store_settings=None, format_settings=None, **kwargs) -> None: super(ParquetSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.store_settings = store_settings + self.format_settings = format_settings self.type = 'ParquetSink' @@ -23425,6 +23958,45 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'ParquetSource' +class ParquetWriteSettings(FormatWriteSettings): + """Parquet write settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param max_rows_per_file: Limit the written file's row count to be smaller + than or equal to the specified count. Type: integer (or Expression with + resultType integer). + :type max_rows_per_file: object + :param file_name_prefix: Specifies the file name pattern + _. when copy from non-file based + store without partitionOptions. Type: string (or Expression with + resultType string). + :type file_name_prefix: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_rows_per_file': {'key': 'maxRowsPerFile', 'type': 'object'}, + 'file_name_prefix': {'key': 'fileNamePrefix', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_rows_per_file=None, file_name_prefix=None, **kwargs) -> None: + super(ParquetWriteSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.max_rows_per_file = max_rows_per_file + self.file_name_prefix = file_name_prefix + self.type = 'ParquetWriteSettings' + + class PaypalLinkedService(LinkedService): """Paypal Service linked service. @@ -25546,14 +26118,10 @@ class RestSink(CopySink): :param request_interval: The time to await before sending next request, in milliseconds :type request_interval: object - :param compression_type: Compression Type to Send data in compressed - format with Optimal Compression Level, Default is None. And The Only - Supported option is Gzip. - :type compression_type: object - :param wrap_request_json_in_an_object: Wraps Request Array Json into an - Object before calling the rest endpoint , Default is false. 
ex: if true - request content sample format is { rows:[]} else the format is [] - :type wrap_request_json_in_an_object: object + :param http_compression_type: Http Compression Type to Send data in + compressed format with Optimal Compression Level, Default is None. And The + Only Supported option is Gzip. + :type http_compression_type: object """ _validation = { @@ -25572,18 +26140,16 @@ class RestSink(CopySink): 'additional_headers': {'key': 'additionalHeaders', 'type': 'object'}, 'http_request_timeout': {'key': 'httpRequestTimeout', 'type': 'object'}, 'request_interval': {'key': 'requestInterval', 'type': 'object'}, - 'compression_type': {'key': 'compressionType', 'type': 'object'}, - 'wrap_request_json_in_an_object': {'key': 'wrapRequestJsonInAnObject', 'type': 'object'}, + 'http_compression_type': {'key': 'httpCompressionType', 'type': 'object'}, } - def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, request_method=None, additional_headers=None, http_request_timeout=None, request_interval=None, compression_type=None, wrap_request_json_in_an_object=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, request_method=None, additional_headers=None, http_request_timeout=None, request_interval=None, http_compression_type=None, **kwargs) -> None: super(RestSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) self.request_method = request_method self.additional_headers = additional_headers self.http_request_timeout = http_request_timeout self.request_interval = request_interval - self.compression_type = compression_type - self.wrap_request_json_in_an_object = wrap_request_json_in_an_object + self.http_compression_type = http_compression_type self.type = 'RestSink' @@ -32041,6 +32607,142 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'SybaseTable' +class TabularTranslator(CopyTranslator): + """A copy activity tabular translator. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param column_mappings: Column mappings. Example: "UserId: MyUserId, + Group: MyGroup, Name: MyName" Type: string (or Expression with resultType + string). This property will be retired. Please use mappings property. + :type column_mappings: object + :param schema_mapping: The schema mapping to map between tabular data and + hierarchical data. Example: {"Column1": "$.Column1", "Column2": + "$.Column2.Property1", "Column3": "$.Column2.Property2"}. Type: object (or + Expression with resultType object). This property will be retired. Please + use mappings property. + :type schema_mapping: object + :param collection_reference: The JSON Path of the Nested Array that is + going to do cross-apply. Type: object (or Expression with resultType + object). 
+ :type collection_reference: object + :param map_complex_values_to_string: Whether to map complex (array and + object) values to simple strings in json format. Type: boolean (or + Expression with resultType boolean). + :type map_complex_values_to_string: object + :param mappings: Column mappings with logical types. Tabular->tabular + example: + [{"source":{"name":"CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"name":"CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Hierarchical->tabular example: + [{"source":{"path":"$.CustomerName","type":"String"},"sink":{"name":"ClientName","type":"String"}},{"source":{"path":"$.CustomerAddress","type":"String"},"sink":{"name":"ClientAddress","type":"String"}}]. + Type: object (or Expression with resultType object). + :type mappings: object + :param type_conversion: Whether to enable the advanced type conversion + feature in the Copy activity. Type: boolean (or Expression with resultType + boolean). + :type type_conversion: object + :param type_conversion_settings: Type conversion settings + :type type_conversion_settings: + ~azure.mgmt.datafactory.models.TypeConversionSettings + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'column_mappings': {'key': 'columnMappings', 'type': 'object'}, + 'schema_mapping': {'key': 'schemaMapping', 'type': 'object'}, + 'collection_reference': {'key': 'collectionReference', 'type': 'object'}, + 'map_complex_values_to_string': {'key': 'mapComplexValuesToString', 'type': 'object'}, + 'mappings': {'key': 'mappings', 'type': 'object'}, + 'type_conversion': {'key': 'typeConversion', 'type': 'object'}, + 'type_conversion_settings': {'key': 'typeConversionSettings', 'type': 'TypeConversionSettings'}, + } + + def __init__(self, *, additional_properties=None, column_mappings=None, schema_mapping=None, collection_reference=None, map_complex_values_to_string=None, mappings=None, type_conversion=None, type_conversion_settings=None, **kwargs) -> None: + super(TabularTranslator, self).__init__(additional_properties=additional_properties, **kwargs) + self.column_mappings = column_mappings + self.schema_mapping = schema_mapping + self.collection_reference = collection_reference + self.map_complex_values_to_string = map_complex_values_to_string + self.mappings = mappings + self.type_conversion = type_conversion + self.type_conversion_settings = type_conversion_settings + self.type = 'TabularTranslator' + + +class TarGZipReadSettings(CompressionReadSettings): + """The TarGZip compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). 
+ :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, preserve_compression_file_name_as_folder=None, **kwargs) -> None: + super(TarGZipReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + self.type = 'TarGZipReadSettings' + + +class TarReadSettings(CompressionReadSettings): + """The Tar compression read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Constant filled by server. + :type type: str + :param preserve_compression_file_name_as_folder: Preserve the compression + file name as folder path. Type: boolean (or Expression with resultType + boolean). + :type preserve_compression_file_name_as_folder: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'preserve_compression_file_name_as_folder': {'key': 'preserveCompressionFileNameAsFolder', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, preserve_compression_file_name_as_folder=None, **kwargs) -> None: + super(TarReadSettings, self).__init__(additional_properties=additional_properties, **kwargs) + self.preserve_compression_file_name_as_folder = preserve_compression_file_name_as_folder + self.type = 'TarReadSettings' + + class TeradataLinkedService(LinkedService): """Linked service for Teradata data source. @@ -32811,6 +33513,49 @@ def __init__(self, *, reference_trigger, offset: str=None, size: str=None, **kwa self.type = 'TumblingWindowTriggerDependencyReference' +class TypeConversionSettings(Model): + """Type conversion settings. + + :param allow_data_truncation: Whether to allow data truncation when + converting the data. Type: boolean (or Expression with resultType + boolean). + :type allow_data_truncation: object + :param treat_boolean_as_number: Whether to treat boolean values as + numbers. Type: boolean (or Expression with resultType boolean). + :type treat_boolean_as_number: object + :param date_time_format: The format for DateTime values. Type: string (or + Expression with resultType string). + :type date_time_format: object + :param date_time_offset_format: The format for DateTimeOffset values. + Type: string (or Expression with resultType string). + :type date_time_offset_format: object + :param time_span_format: The format for TimeSpan values. Type: string (or + Expression with resultType string). + :type time_span_format: object + :param culture: The culture used to convert data from/to string. Type: + string (or Expression with resultType string). 
+ :type culture: object + """ + + _attribute_map = { + 'allow_data_truncation': {'key': 'allowDataTruncation', 'type': 'object'}, + 'treat_boolean_as_number': {'key': 'treatBooleanAsNumber', 'type': 'object'}, + 'date_time_format': {'key': 'dateTimeFormat', 'type': 'object'}, + 'date_time_offset_format': {'key': 'dateTimeOffsetFormat', 'type': 'object'}, + 'time_span_format': {'key': 'timeSpanFormat', 'type': 'object'}, + 'culture': {'key': 'culture', 'type': 'object'}, + } + + def __init__(self, *, allow_data_truncation=None, treat_boolean_as_number=None, date_time_format=None, date_time_offset_format=None, time_span_format=None, culture=None, **kwargs) -> None: + super(TypeConversionSettings, self).__init__(**kwargs) + self.allow_data_truncation = allow_data_truncation + self.treat_boolean_as_number = treat_boolean_as_number + self.date_time_format = date_time_format + self.date_time_offset_format = date_time_offset_format + self.time_span_format = time_span_format + self.culture = culture + + class UntilActivity(ControlActivity): """This activity executes inner activities until the specified boolean expression results to true or timeout is reached, whichever is earlier. diff --git a/sdk/datafactory/azure-mgmt-datafactory/setup.py b/sdk/datafactory/azure-mgmt-datafactory/setup.py index 2287f0c260af..3b3fd5c68d7e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/setup.py +++ b/sdk/datafactory/azure-mgmt-datafactory/setup.py @@ -36,7 +36,7 @@ pass # Version extraction inspired from 'requests' -with open(os.path.join(package_folder_path, 'version.py') +with open(os.path.join(package_folder_path, 'version.py') if os.path.exists(os.path.join(package_folder_path, 'version.py')) else os.path.join(package_folder_path, '_version.py'), 'r') as fd: version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
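Reviewer note (not part of the generated patch): the sketch below shows one way the new models introduced in this diff could be composed into a copy activity, to make the added surface easier to review. All values (workspace domain, access token, cluster id, table and database names, dataset and linked-service reference names) are placeholders, and the CopyActivity inputs/outputs/translator parameters belong to the pre-existing model rather than this change.

from azure.mgmt.datafactory.models import (
    AzureDatabricksDeltaLakeDataset,
    AzureDatabricksDeltaLakeExportCommand,
    AzureDatabricksDeltaLakeLinkedService,
    AzureDatabricksDeltaLakeSource,
    CopyActivity,
    DatasetReference,
    LinkedServiceReference,
    ParquetSink,
    ParquetWriteSettings,
    SecureString,
    TabularTranslator,
    TypeConversionSettings,
)

# Linked service for the Databricks workspace (placeholder values).
delta_lake_ls = AzureDatabricksDeltaLakeLinkedService(
    domain="https://adb-1234567890123456.7.azuredatabricks.net",
    access_token=SecureString(value="<databricks-access-token>"),
    cluster_id="0123-456789-cluster",
)

# Dataset pointing at a delta table through that linked service.
delta_lake_ds = AzureDatabricksDeltaLakeDataset(
    linked_service_name=LinkedServiceReference(reference_name="DeltaLakeLinkedService"),
    database="sales",
    table="orders",
)

# Source using the new export command settings.
source = AzureDatabricksDeltaLakeSource(
    query="SELECT * FROM sales.orders",
    export_settings=AzureDatabricksDeltaLakeExportCommand(
        date_format="yyyy-MM-dd",
        timestamp_format="yyyy-MM-dd HH:mm:ss",
    ),
)

# Parquet sink using the new ParquetWriteSettings (row-count splitting and file naming).
sink = ParquetSink(
    format_settings=ParquetWriteSettings(
        max_rows_per_file=1000000,
        file_name_prefix="orders",
    ),
)

# Explicit column mappings plus the new type conversion settings.
translator = TabularTranslator(
    mappings=[
        {
            "source": {"name": "CustomerName", "type": "String"},
            "sink": {"name": "ClientName", "type": "String"},
        },
    ],
    type_conversion=True,
    type_conversion_settings=TypeConversionSettings(
        allow_data_truncation=True,
        treat_boolean_as_number=False,
    ),
)

copy_activity = CopyActivity(
    name="CopyDeltaToParquet",
    source=source,
    sink=sink,
    translator=translator,
    inputs=[DatasetReference(reference_name="DeltaLakeOrders")],
    outputs=[DatasetReference(reference_name="ParquetOrders")],
)

These objects would then be published with the management client's usual create_or_update operations (linked_services, datasets, pipelines), exactly as with the existing models; nothing about the client surface changes in this patch.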