diff --git a/sdk/datafactory/arm-datafactory/src/models/dataFlowDebugSessionMappers.ts b/sdk/datafactory/arm-datafactory/src/models/dataFlowDebugSessionMappers.ts
index 0d470f86b98c..b9ccb6592dde 100644
--- a/sdk/datafactory/arm-datafactory/src/models/dataFlowDebugSessionMappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/dataFlowDebugSessionMappers.ts
@@ -120,6 +120,7 @@ export {
EloquaObjectDataset,
EntityReference,
EnvironmentVariableSetup,
+ ExcelDataset,
FileServerLinkedService,
FileServerLocation,
FileShareDataset,
@@ -200,6 +201,7 @@ export {
OracleTableDataset,
OrcDataset,
OrcFormat,
+ PackageStore,
ParameterSpecification,
ParquetDataset,
ParquetFormat,
@@ -245,8 +247,12 @@ export {
ServiceNowObjectDataset,
SftpLocation,
SftpServerLinkedService,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
ShopifyLinkedService,
ShopifyObjectDataset,
+ SnowflakeDataset,
+ SnowflakeLinkedService,
SparkLinkedService,
SparkObjectDataset,
SqlServerLinkedService,
@@ -270,6 +276,7 @@ export {
WebTableDataset,
XeroLinkedService,
XeroObjectDataset,
+ XmlDataset,
ZohoLinkedService,
ZohoObjectDataset
} from "../models/mappers";
diff --git a/sdk/datafactory/arm-datafactory/src/models/dataFlowsMappers.ts b/sdk/datafactory/arm-datafactory/src/models/dataFlowsMappers.ts
index 9d3716d531f6..bdf0b64bd722 100644
--- a/sdk/datafactory/arm-datafactory/src/models/dataFlowsMappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/dataFlowsMappers.ts
@@ -98,6 +98,7 @@ export {
AzureTableStorageLinkedService,
BaseResource,
BinaryDataset,
+ BinaryReadSettings,
BinarySink,
BinarySource,
BlobEventsTrigger,
@@ -115,6 +116,7 @@ export {
CommonDataServiceForAppsSink,
CommonDataServiceForAppsSource,
ComponentSetup,
+ CompressionReadSettings,
ConcurLinkedService,
ConcurObjectDataset,
ConcurSource,
@@ -196,11 +198,14 @@ export {
EloquaSource,
EntityReference,
EnvironmentVariableSetup,
+ ExcelDataset,
+ ExcelSource,
ExecuteDataFlowActivity,
ExecuteDataFlowActivityTypePropertiesCompute,
ExecutePipelineActivity,
ExecuteSSISPackageActivity,
ExecutionActivity,
+ ExportSettings,
Expression,
Factory,
FactoryGitHubConfiguration,
@@ -263,6 +268,7 @@ export {
ImpalaLinkedService,
ImpalaObjectDataset,
ImpalaSource,
+ ImportSettings,
InformixLinkedService,
InformixSink,
InformixSource,
@@ -282,6 +288,7 @@ export {
JiraSource,
JsonDataset,
JsonFormat,
+ JsonReadSettings,
JsonSink,
JsonSource,
JsonWriteSettings,
@@ -345,6 +352,7 @@ export {
OrcFormat,
OrcSink,
OrcSource,
+ PackageStore,
ParameterSpecification,
ParquetDataset,
ParquetFormat,
@@ -431,10 +439,19 @@ export {
SftpReadSettings,
SftpServerLinkedService,
SftpWriteSettings,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
+ SharePointOnlineListSource,
ShopifyLinkedService,
ShopifyObjectDataset,
ShopifySource,
SkipErrorFile,
+ SnowflakeDataset,
+ SnowflakeExportCopyCommand,
+ SnowflakeImportCopyCommand,
+ SnowflakeLinkedService,
+ SnowflakeSink,
+ SnowflakeSource,
SparkLinkedService,
SparkObjectDataset,
SparkSource,
@@ -504,6 +521,10 @@ export {
XeroLinkedService,
XeroObjectDataset,
XeroSource,
+ XmlDataset,
+ XmlReadSettings,
+ XmlSource,
+ ZipDeflateReadSettings,
ZohoLinkedService,
ZohoObjectDataset,
ZohoSource
diff --git a/sdk/datafactory/arm-datafactory/src/models/datasetsMappers.ts b/sdk/datafactory/arm-datafactory/src/models/datasetsMappers.ts
index 4fd2bf0b1967..2111c4dd05c6 100644
--- a/sdk/datafactory/arm-datafactory/src/models/datasetsMappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/datasetsMappers.ts
@@ -98,6 +98,7 @@ export {
AzureTableStorageLinkedService,
BaseResource,
BinaryDataset,
+ BinaryReadSettings,
BinarySink,
BinarySource,
BlobEventsTrigger,
@@ -115,6 +116,7 @@ export {
CommonDataServiceForAppsSink,
CommonDataServiceForAppsSource,
ComponentSetup,
+ CompressionReadSettings,
ConcurLinkedService,
ConcurObjectDataset,
ConcurSource,
@@ -196,11 +198,14 @@ export {
EloquaSource,
EntityReference,
EnvironmentVariableSetup,
+ ExcelDataset,
+ ExcelSource,
ExecuteDataFlowActivity,
ExecuteDataFlowActivityTypePropertiesCompute,
ExecutePipelineActivity,
ExecuteSSISPackageActivity,
ExecutionActivity,
+ ExportSettings,
Expression,
Factory,
FactoryGitHubConfiguration,
@@ -263,6 +268,7 @@ export {
ImpalaLinkedService,
ImpalaObjectDataset,
ImpalaSource,
+ ImportSettings,
InformixLinkedService,
InformixSink,
InformixSource,
@@ -282,6 +288,7 @@ export {
JiraSource,
JsonDataset,
JsonFormat,
+ JsonReadSettings,
JsonSink,
JsonSource,
JsonWriteSettings,
@@ -345,6 +352,7 @@ export {
OrcFormat,
OrcSink,
OrcSource,
+ PackageStore,
ParameterSpecification,
ParquetDataset,
ParquetFormat,
@@ -431,10 +439,19 @@ export {
SftpReadSettings,
SftpServerLinkedService,
SftpWriteSettings,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
+ SharePointOnlineListSource,
ShopifyLinkedService,
ShopifyObjectDataset,
ShopifySource,
SkipErrorFile,
+ SnowflakeDataset,
+ SnowflakeExportCopyCommand,
+ SnowflakeImportCopyCommand,
+ SnowflakeLinkedService,
+ SnowflakeSink,
+ SnowflakeSource,
SparkLinkedService,
SparkObjectDataset,
SparkSource,
@@ -504,6 +521,10 @@ export {
XeroLinkedService,
XeroObjectDataset,
XeroSource,
+ XmlDataset,
+ XmlReadSettings,
+ XmlSource,
+ ZipDeflateReadSettings,
ZohoLinkedService,
ZohoObjectDataset,
ZohoSource
diff --git a/sdk/datafactory/arm-datafactory/src/models/factoriesMappers.ts b/sdk/datafactory/arm-datafactory/src/models/factoriesMappers.ts
index 1e2234e64d0a..ec36aceea94e 100644
--- a/sdk/datafactory/arm-datafactory/src/models/factoriesMappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/factoriesMappers.ts
@@ -99,6 +99,7 @@ export {
AzureTableStorageLinkedService,
BaseResource,
BinaryDataset,
+ BinaryReadSettings,
BinarySink,
BinarySource,
BlobEventsTrigger,
@@ -116,6 +117,7 @@ export {
CommonDataServiceForAppsSink,
CommonDataServiceForAppsSource,
ComponentSetup,
+ CompressionReadSettings,
ConcurLinkedService,
ConcurObjectDataset,
ConcurSource,
@@ -196,11 +198,14 @@ export {
EloquaSource,
EntityReference,
EnvironmentVariableSetup,
+ ExcelDataset,
+ ExcelSource,
ExecuteDataFlowActivity,
ExecuteDataFlowActivityTypePropertiesCompute,
ExecutePipelineActivity,
ExecuteSSISPackageActivity,
ExecutionActivity,
+ ExportSettings,
Expression,
Factory,
FactoryGitHubConfiguration,
@@ -268,6 +273,7 @@ export {
ImpalaLinkedService,
ImpalaObjectDataset,
ImpalaSource,
+ ImportSettings,
InformixLinkedService,
InformixSink,
InformixSource,
@@ -287,6 +293,7 @@ export {
JiraSource,
JsonDataset,
JsonFormat,
+ JsonReadSettings,
JsonSink,
JsonSource,
JsonWriteSettings,
@@ -350,6 +357,7 @@ export {
OrcFormat,
OrcSink,
OrcSource,
+ PackageStore,
ParameterSpecification,
ParquetDataset,
ParquetFormat,
@@ -436,10 +444,19 @@ export {
SftpReadSettings,
SftpServerLinkedService,
SftpWriteSettings,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
+ SharePointOnlineListSource,
ShopifyLinkedService,
ShopifyObjectDataset,
ShopifySource,
SkipErrorFile,
+ SnowflakeDataset,
+ SnowflakeExportCopyCommand,
+ SnowflakeImportCopyCommand,
+ SnowflakeLinkedService,
+ SnowflakeSink,
+ SnowflakeSource,
SparkLinkedService,
SparkObjectDataset,
SparkSource,
@@ -510,6 +527,10 @@ export {
XeroLinkedService,
XeroObjectDataset,
XeroSource,
+ XmlDataset,
+ XmlReadSettings,
+ XmlSource,
+ ZipDeflateReadSettings,
ZohoLinkedService,
ZohoObjectDataset,
ZohoSource
diff --git a/sdk/datafactory/arm-datafactory/src/models/index.ts b/sdk/datafactory/arm-datafactory/src/models/index.ts
index 3cb4579316c5..a51eced055e4 100644
--- a/sdk/datafactory/arm-datafactory/src/models/index.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/index.ts
@@ -436,7 +436,7 @@ export interface ParameterSpecification {
/**
* Contains the possible cases for LinkedService.
*/
-export type LinkedServiceUnion = LinkedService | AzureFunctionLinkedService | AzureDataExplorerLinkedService | SapTableLinkedService | GoogleAdWordsLinkedService | OracleServiceCloudLinkedService | DynamicsAXLinkedService | ResponsysLinkedService | AzureDatabricksLinkedService | AzureDataLakeAnalyticsLinkedService | HDInsightOnDemandLinkedService | SalesforceMarketingCloudLinkedService | NetezzaLinkedService | VerticaLinkedService | ZohoLinkedService | XeroLinkedService | SquareLinkedService | SparkLinkedService | ShopifyLinkedService | ServiceNowLinkedService | QuickBooksLinkedService | PrestoLinkedService | PhoenixLinkedService | PaypalLinkedService | MarketoLinkedService | AzureMariaDBLinkedService | MariaDBLinkedService | MagentoLinkedService | JiraLinkedService | ImpalaLinkedService | HubspotLinkedService | HiveLinkedService | HBaseLinkedService | GreenplumLinkedService | GoogleBigQueryLinkedService | EloquaLinkedService | DrillLinkedService | CouchbaseLinkedService | ConcurLinkedService | AzurePostgreSqlLinkedService | AmazonMWSLinkedService | SapHanaLinkedService | SapBWLinkedService | SftpServerLinkedService | FtpServerLinkedService | HttpLinkedService | AzureSearchLinkedService | CustomDataSourceLinkedService | AmazonRedshiftLinkedService | AmazonS3LinkedService | RestServiceLinkedService | SapOpenHubLinkedService | SapEccLinkedService | SapCloudForCustomerLinkedService | SalesforceServiceCloudLinkedService | SalesforceLinkedService | Office365LinkedService | AzureBlobFSLinkedService | AzureDataLakeStoreLinkedService | CosmosDbMongoDbApiLinkedService | MongoDbV2LinkedService | MongoDbLinkedService | CassandraLinkedService | WebLinkedService | ODataLinkedService | HdfsLinkedService | MicrosoftAccessLinkedService | InformixLinkedService | OdbcLinkedService | AzureMLServiceLinkedService | AzureMLLinkedService | TeradataLinkedService | Db2LinkedService | SybaseLinkedService | PostgreSqlLinkedService | MySqlLinkedService | AzureMySqlLinkedService | 
OracleLinkedService | GoogleCloudStorageLinkedService | AzureFileStorageLinkedService | FileServerLinkedService | HDInsightLinkedService | CommonDataServiceForAppsLinkedService | DynamicsCrmLinkedService | DynamicsLinkedService | CosmosDbLinkedService | AzureKeyVaultLinkedService | AzureBatchLinkedService | AzureSqlMILinkedService | AzureSqlDatabaseLinkedService | SqlServerLinkedService | AzureSqlDWLinkedService | AzureTableStorageLinkedService | AzureBlobStorageLinkedService | AzureStorageLinkedService;
+export type LinkedServiceUnion = LinkedService | SharePointOnlineListLinkedService | SnowflakeLinkedService | AzureFunctionLinkedService | AzureDataExplorerLinkedService | SapTableLinkedService | GoogleAdWordsLinkedService | OracleServiceCloudLinkedService | DynamicsAXLinkedService | ResponsysLinkedService | AzureDatabricksLinkedService | AzureDataLakeAnalyticsLinkedService | HDInsightOnDemandLinkedService | SalesforceMarketingCloudLinkedService | NetezzaLinkedService | VerticaLinkedService | ZohoLinkedService | XeroLinkedService | SquareLinkedService | SparkLinkedService | ShopifyLinkedService | ServiceNowLinkedService | QuickBooksLinkedService | PrestoLinkedService | PhoenixLinkedService | PaypalLinkedService | MarketoLinkedService | AzureMariaDBLinkedService | MariaDBLinkedService | MagentoLinkedService | JiraLinkedService | ImpalaLinkedService | HubspotLinkedService | HiveLinkedService | HBaseLinkedService | GreenplumLinkedService | GoogleBigQueryLinkedService | EloquaLinkedService | DrillLinkedService | CouchbaseLinkedService | ConcurLinkedService | AzurePostgreSqlLinkedService | AmazonMWSLinkedService | SapHanaLinkedService | SapBWLinkedService | SftpServerLinkedService | FtpServerLinkedService | HttpLinkedService | AzureSearchLinkedService | CustomDataSourceLinkedService | AmazonRedshiftLinkedService | AmazonS3LinkedService | RestServiceLinkedService | SapOpenHubLinkedService | SapEccLinkedService | SapCloudForCustomerLinkedService | SalesforceServiceCloudLinkedService | SalesforceLinkedService | Office365LinkedService | AzureBlobFSLinkedService | AzureDataLakeStoreLinkedService | CosmosDbMongoDbApiLinkedService | MongoDbV2LinkedService | MongoDbLinkedService | CassandraLinkedService | WebLinkedService | ODataLinkedService | HdfsLinkedService | MicrosoftAccessLinkedService | InformixLinkedService | OdbcLinkedService | AzureMLServiceLinkedService | AzureMLLinkedService | TeradataLinkedService | Db2LinkedService | SybaseLinkedService | PostgreSqlLinkedService 
| MySqlLinkedService | AzureMySqlLinkedService | OracleLinkedService | GoogleCloudStorageLinkedService | AzureFileStorageLinkedService | FileServerLinkedService | HDInsightLinkedService | CommonDataServiceForAppsLinkedService | DynamicsCrmLinkedService | DynamicsLinkedService | CosmosDbLinkedService | AzureKeyVaultLinkedService | AzureBatchLinkedService | AzureSqlMILinkedService | AzureSqlDatabaseLinkedService | SqlServerLinkedService | AzureSqlDWLinkedService | AzureTableStorageLinkedService | AzureBlobStorageLinkedService | AzureStorageLinkedService;
/**
* The Azure Data Factory nested object which contains the information and credential which can be
@@ -492,7 +492,7 @@ export interface DatasetFolder {
/**
* Contains the possible cases for Dataset.
*/
-export type DatasetUnion = Dataset | GoogleAdWordsObjectDataset | AzureDataExplorerTableDataset | OracleServiceCloudObjectDataset | DynamicsAXResourceDataset | ResponsysObjectDataset | SalesforceMarketingCloudObjectDataset | VerticaTableDataset | NetezzaTableDataset | ZohoObjectDataset | XeroObjectDataset | SquareObjectDataset | SparkObjectDataset | ShopifyObjectDataset | ServiceNowObjectDataset | QuickBooksObjectDataset | PrestoObjectDataset | PhoenixObjectDataset | PaypalObjectDataset | MarketoObjectDataset | AzureMariaDBTableDataset | MariaDBTableDataset | MagentoObjectDataset | JiraObjectDataset | ImpalaObjectDataset | HubspotObjectDataset | HiveObjectDataset | HBaseObjectDataset | GreenplumTableDataset | GoogleBigQueryObjectDataset | EloquaObjectDataset | DrillTableDataset | CouchbaseTableDataset | ConcurObjectDataset | AzurePostgreSqlTableDataset | AmazonMWSObjectDataset | HttpDataset | AzureSearchIndexDataset | WebTableDataset | SapTableResourceDataset | RestResourceDataset | SqlServerTableDataset | SapOpenHubTableDataset | SapHanaTableDataset | SapEccResourceDataset | SapCloudForCustomerResourceDataset | SapBwCubeDataset | SybaseTableDataset | SalesforceServiceCloudObjectDataset | SalesforceObjectDataset | MicrosoftAccessTableDataset | PostgreSqlTableDataset | MySqlTableDataset | OdbcTableDataset | InformixTableDataset | RelationalTableDataset | Db2TableDataset | AmazonRedshiftTableDataset | AzureMySqlTableDataset | TeradataTableDataset | OracleTableDataset | ODataResourceDataset | CosmosDbMongoDbApiCollectionDataset | MongoDbV2CollectionDataset | MongoDbCollectionDataset | FileShareDataset | Office365Dataset | AzureBlobFSDataset | AzureDataLakeStoreDataset | CommonDataServiceForAppsEntityDataset | DynamicsCrmEntityDataset | DynamicsEntityDataset | DocumentDbCollectionDataset | CosmosDbSqlApiCollectionDataset | CustomDataset | CassandraTableDataset | AzureSqlDWTableDataset | AzureSqlMITableDataset | AzureSqlTableDataset | AzureTableDataset | 
AzureBlobDataset | BinaryDataset | OrcDataset | JsonDataset | DelimitedTextDataset | ParquetDataset | AvroDataset | AmazonS3Dataset;
+export type DatasetUnion = Dataset | SharePointOnlineListResourceDataset | SnowflakeDataset | GoogleAdWordsObjectDataset | AzureDataExplorerTableDataset | OracleServiceCloudObjectDataset | DynamicsAXResourceDataset | ResponsysObjectDataset | SalesforceMarketingCloudObjectDataset | VerticaTableDataset | NetezzaTableDataset | ZohoObjectDataset | XeroObjectDataset | SquareObjectDataset | SparkObjectDataset | ShopifyObjectDataset | ServiceNowObjectDataset | QuickBooksObjectDataset | PrestoObjectDataset | PhoenixObjectDataset | PaypalObjectDataset | MarketoObjectDataset | AzureMariaDBTableDataset | MariaDBTableDataset | MagentoObjectDataset | JiraObjectDataset | ImpalaObjectDataset | HubspotObjectDataset | HiveObjectDataset | HBaseObjectDataset | GreenplumTableDataset | GoogleBigQueryObjectDataset | EloquaObjectDataset | DrillTableDataset | CouchbaseTableDataset | ConcurObjectDataset | AzurePostgreSqlTableDataset | AmazonMWSObjectDataset | HttpDataset | AzureSearchIndexDataset | WebTableDataset | SapTableResourceDataset | RestResourceDataset | SqlServerTableDataset | SapOpenHubTableDataset | SapHanaTableDataset | SapEccResourceDataset | SapCloudForCustomerResourceDataset | SapBwCubeDataset | SybaseTableDataset | SalesforceServiceCloudObjectDataset | SalesforceObjectDataset | MicrosoftAccessTableDataset | PostgreSqlTableDataset | MySqlTableDataset | OdbcTableDataset | InformixTableDataset | RelationalTableDataset | Db2TableDataset | AmazonRedshiftTableDataset | AzureMySqlTableDataset | TeradataTableDataset | OracleTableDataset | ODataResourceDataset | CosmosDbMongoDbApiCollectionDataset | MongoDbV2CollectionDataset | MongoDbCollectionDataset | FileShareDataset | Office365Dataset | AzureBlobFSDataset | AzureDataLakeStoreDataset | CommonDataServiceForAppsEntityDataset | DynamicsCrmEntityDataset | DynamicsEntityDataset | DocumentDbCollectionDataset | CosmosDbSqlApiCollectionDataset | CustomDataset | CassandraTableDataset | AzureSqlDWTableDataset | AzureSqlMITableDataset | 
AzureSqlTableDataset | AzureTableDataset | AzureBlobDataset | BinaryDataset | OrcDataset | XmlDataset | JsonDataset | DelimitedTextDataset | ParquetDataset | ExcelDataset | AvroDataset | AmazonS3Dataset;
/**
* The Azure Data Factory nested object which identifies data within different data stores, such as
@@ -1935,6 +1935,14 @@ export interface DataFlowSink extends Transformation {
* Dataset reference.
*/
dataset?: DatasetReference;
+ /**
+ * Linked service reference.
+ */
+ linkedService?: LinkedServiceReference;
+ /**
+ * Schema linked service reference.
+ */
+ schemaLinkedService?: LinkedServiceReference;
}
/**
@@ -1945,6 +1953,14 @@ export interface DataFlowSource extends Transformation {
* Dataset reference.
*/
dataset?: DatasetReference;
+ /**
+ * Linked service reference.
+ */
+ linkedService?: LinkedServiceReference;
+ /**
+ * Schema linked service reference.
+ */
+ schemaLinkedService?: LinkedServiceReference;
}
/**
@@ -1986,6 +2002,98 @@ export interface MappingDataFlow {
script?: string;
}
+/**
+ * SharePoint Online List linked service.
+ */
+export interface SharePointOnlineListLinkedService {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "SharePointOnlineList";
+ /**
+ * The integration runtime reference.
+ */
+ connectVia?: IntegrationRuntimeReference;
+ /**
+ * Linked service description.
+ */
+ description?: string;
+ /**
+ * Parameters for linked service.
+ */
+ parameters?: { [propertyName: string]: ParameterSpecification };
+ /**
+ * List of tags that can be used for describing the linked service.
+ */
+ annotations?: any[];
+ /**
+ * The URL of the SharePoint Online site. For example,
+ * https://contoso.sharepoint.com/sites/siteName. Type: string (or Expression with resultType
+ * string).
+ */
+ siteUrl: any;
+ /**
+ * The tenant ID under which your application resides. You can find it from Azure portal Active
+ * Directory overview page. Type: string (or Expression with resultType string).
+ */
+ tenantId: any;
+ /**
+ * The application (client) ID of your application registered in Azure Active Directory. Make
+ * sure to grant SharePoint site permission to this application. Type: string (or Expression with
+ * resultType string).
+ */
+ servicePrincipalId: any;
+ /**
+ * The client secret of your application registered in Azure Active Directory. Type: string (or
+ * Expression with resultType string).
+ */
+ servicePrincipalKey: SecretBaseUnion;
+ /**
+ * The encrypted credential used for authentication. Credentials are encrypted using the
+ * integration runtime credential manager. Type: string (or Expression with resultType string).
+ */
+ encryptedCredential?: any;
+}
+
+/**
+ * Snowflake linked service.
+ */
+export interface SnowflakeLinkedService {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "Snowflake";
+ /**
+ * The integration runtime reference.
+ */
+ connectVia?: IntegrationRuntimeReference;
+ /**
+ * Linked service description.
+ */
+ description?: string;
+ /**
+ * Parameters for linked service.
+ */
+ parameters?: { [propertyName: string]: ParameterSpecification };
+ /**
+ * List of tags that can be used for describing the linked service.
+ */
+ annotations?: any[];
+ /**
+ * The connection string of snowflake. Type: string, SecureString.
+ */
+ connectionString: any;
+ /**
+ * The Azure key vault secret reference of password in connection string.
+ */
+ password?: AzureKeyVaultSecretReference;
+ /**
+ * The encrypted credential used for authentication. Credentials are encrypted using the
+ * integration runtime credential manager. Type: string (or Expression with resultType string).
+ */
+ encryptedCredential?: any;
+}
+
/**
* Azure Function linked service.
*/
@@ -4663,7 +4771,7 @@ export interface SapHanaLinkedService {
/**
* Host name of the SAP HANA server. Type: string (or Expression with resultType string).
*/
- server: any;
+ server?: any;
/**
* The authentication type to be used to connect to the SAP HANA server. Possible values include:
* 'Basic', 'Windows'
@@ -6759,7 +6867,7 @@ export interface AzureFileStorageLinkedService {
/**
* Host name of the server. Type: string (or Expression with resultType string).
*/
- host: any;
+ host?: any;
/**
* User ID to logon the server. Type: string (or Expression with resultType string).
*/
@@ -6768,6 +6876,29 @@ export interface AzureFileStorageLinkedService {
* Password to logon the server.
*/
password?: SecretBaseUnion;
+ /**
+ * The connection string. It is mutually exclusive with sasUri property. Type: string,
+ * SecureString or AzureKeyVaultSecretReference.
+ */
+ connectionString?: any;
+ /**
+ * The Azure key vault secret reference of accountKey in connection string.
+ */
+ accountKey?: AzureKeyVaultSecretReference;
+ /**
+ * SAS URI of the Azure File resource. It is mutually exclusive with connectionString property.
+ * Type: string, SecureString or AzureKeyVaultSecretReference.
+ */
+ sasUri?: any;
+ /**
+ * The Azure key vault secret reference of sasToken in sas uri.
+ */
+ sasToken?: AzureKeyVaultSecretReference;
+ /**
+ * The azure file share name. It is required when auth with accountKey/sasToken. Type: string (or
+ * Expression with resultType string).
+ */
+ fileShare?: any;
/**
* The encrypted credential used for authentication. Credentials are encrypted using the
* integration runtime credential manager. Type: string (or Expression with resultType string).
@@ -7657,6 +7788,99 @@ export interface AzureStorageLinkedService {
encryptedCredential?: string;
}
+/**
+ * The sharepoint online list resource dataset.
+ */
+export interface SharePointOnlineListResourceDataset {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "SharePointOnlineListResource";
+ /**
+ * Dataset description.
+ */
+ description?: string;
+ /**
+ * Columns that define the structure of the dataset. Type: array (or Expression with resultType
+ * array), itemType: DatasetDataElement.
+ */
+ structure?: any;
+ /**
+ * Columns that define the physical type schema of the dataset. Type: array (or Expression with
+ * resultType array), itemType: DatasetSchemaDataElement.
+ */
+ schema?: any;
+ /**
+ * Linked service reference.
+ */
+ linkedServiceName: LinkedServiceReference;
+ /**
+ * Parameters for dataset.
+ */
+ parameters?: { [propertyName: string]: ParameterSpecification };
+ /**
+ * List of tags that can be used for describing the Dataset.
+ */
+ annotations?: any[];
+ /**
+ * The folder that this Dataset is in. If not specified, Dataset will appear at the root level.
+ */
+ folder?: DatasetFolder;
+ /**
+ * The name of the SharePoint Online list. Type: string (or Expression with resultType string).
+ */
+ listName?: any;
+}
+
+/**
+ * The snowflake dataset.
+ */
+export interface SnowflakeDataset {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "SnowflakeTable";
+ /**
+ * Dataset description.
+ */
+ description?: string;
+ /**
+ * Columns that define the structure of the dataset. Type: array (or Expression with resultType
+ * array), itemType: DatasetDataElement.
+ */
+ structure?: any;
+ /**
+ * Columns that define the physical type schema of the dataset. Type: array (or Expression with
+ * resultType array), itemType: DatasetSchemaDataElement.
+ */
+ schema?: any;
+ /**
+ * Linked service reference.
+ */
+ linkedServiceName: LinkedServiceReference;
+ /**
+ * Parameters for dataset.
+ */
+ parameters?: { [propertyName: string]: ParameterSpecification };
+ /**
+ * List of tags that can be used for describing the Dataset.
+ */
+ annotations?: any[];
+ /**
+ * The folder that this Dataset is in. If not specified, Dataset will appear at the root level.
+ */
+ folder?: DatasetFolder;
+ /**
+ * The schema name of the Snowflake database. Type: string (or Expression with resultType
+ * string).
+ */
+ snowflakeDatasetSchema?: any;
+ /**
+ * The table name of the Snowflake database. Type: string (or Expression with resultType string).
+ */
+ table?: any;
+}
+
/**
* Google AdWords service dataset.
*/
@@ -12120,6 +12344,66 @@ export interface OrcDataset {
orcCompressionCodec?: OrcCompressionCodec;
}
+/**
+ * Xml dataset.
+ */
+export interface XmlDataset {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "Xml";
+ /**
+ * Dataset description.
+ */
+ description?: string;
+ /**
+ * Columns that define the structure of the dataset. Type: array (or Expression with resultType
+ * array), itemType: DatasetDataElement.
+ */
+ structure?: any;
+ /**
+ * Columns that define the physical type schema of the dataset. Type: array (or Expression with
+ * resultType array), itemType: DatasetSchemaDataElement.
+ */
+ schema?: any;
+ /**
+ * Linked service reference.
+ */
+ linkedServiceName: LinkedServiceReference;
+ /**
+ * Parameters for dataset.
+ */
+ parameters?: { [propertyName: string]: ParameterSpecification };
+ /**
+ * List of tags that can be used for describing the Dataset.
+ */
+ annotations?: any[];
+ /**
+ * The folder that this Dataset is in. If not specified, Dataset will appear at the root level.
+ */
+ folder?: DatasetFolder;
+ /**
+ * The location of the XML data storage.
+ */
+ location: DatasetLocationUnion;
+ /**
+ * The code page name of the preferred encoding. If not specified, the default value is UTF-8,
+ * unless BOM denotes another Unicode encoding. Refer to the name column of the table in the
+ * following link to set supported values:
+ * https://msdn.microsoft.com/library/system.text.encoding.aspx. Type: string (or Expression with
+ * resultType string).
+ */
+ encodingName?: any;
+ /**
+ * The null value string. Type: string (or Expression with resultType string).
+ */
+ nullValue?: any;
+ /**
+ * The data compression method used for the XML dataset.
+ */
+ compression?: DatasetCompressionUnion;
+}
+
/**
* Json dataset.
*/
@@ -12304,13 +12588,13 @@ export interface ParquetDataset {
}
/**
- * Avro dataset.
+ * Excel dataset.
*/
-export interface AvroDataset {
+export interface ExcelDataset {
/**
* Polymorphic Discriminator
*/
- type: "Avro";
+ type: "Excel";
/**
* Dataset description.
*/
@@ -12342,13 +12626,79 @@ export interface AvroDataset {
*/
folder?: DatasetFolder;
/**
- * The location of the avro storage.
+ * The location of the excel storage.
*/
location: DatasetLocationUnion;
/**
- * Possible values include: 'none', 'deflate', 'snappy', 'xz', 'bzip2'
+ * The sheet of excel file. Type: string (or Expression with resultType string).
*/
- avroCompressionCodec?: AvroCompressionCodec;
+ sheetName: any;
+ /**
+ * The partial data of one sheet. Type: string (or Expression with resultType string).
+ */
+ range?: any;
+ /**
+ * When used as input, treat the first row of data as headers. When used as output,write the
+ * headers into the output as the first row of data. The default value is false. Type: boolean
+ * (or Expression with resultType boolean).
+ */
+ firstRowAsHeader?: any;
+ /**
+ * The data compression method used for the Excel dataset.
+ */
+ compression?: DatasetCompressionUnion;
+ /**
+ * The null value string. Type: string (or Expression with resultType string).
+ */
+ nullValue?: any;
+}
+
+/**
+ * Avro dataset.
+ */
+export interface AvroDataset {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "Avro";
+ /**
+ * Dataset description.
+ */
+ description?: string;
+ /**
+ * Columns that define the structure of the dataset. Type: array (or Expression with resultType
+ * array), itemType: DatasetDataElement.
+ */
+ structure?: any;
+ /**
+ * Columns that define the physical type schema of the dataset. Type: array (or Expression with
+ * resultType array), itemType: DatasetSchemaDataElement.
+ */
+ schema?: any;
+ /**
+ * Linked service reference.
+ */
+ linkedServiceName: LinkedServiceReference;
+ /**
+ * Parameters for dataset.
+ */
+ parameters?: { [propertyName: string]: ParameterSpecification };
+ /**
+ * List of tags that can be used for describing the Dataset.
+ */
+ annotations?: any[];
+ /**
+ * The folder that this Dataset is in. If not specified, Dataset will appear at the root level.
+ */
+ folder?: DatasetFolder;
+ /**
+ * The location of the avro storage.
+ */
+ location: DatasetLocationUnion;
+ /**
+ * Possible values include: 'none', 'deflate', 'snappy', 'xz', 'bzip2'
+ */
+ avroCompressionCodec?: AvroCompressionCodec;
avroCompressionLevel?: number;
}
@@ -13502,164 +13852,165 @@ export interface AzureMLBatchExecutionActivity {
}
/**
- * Activity to get metadata of dataset
+ * Contains the possible cases for CompressionReadSettings.
*/
-export interface GetMetadataActivity {
+export type CompressionReadSettingsUnion = CompressionReadSettings | ZipDeflateReadSettings;
+
+/**
+ * Compression read settings.
+ */
+export interface CompressionReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "GetMetadata";
- /**
- * Activity name.
- */
- name: string;
- /**
- * Activity description.
- */
- description?: string;
- /**
- * Activity depends on condition.
- */
- dependsOn?: ActivityDependency[];
- /**
- * Activity user properties.
- */
- userProperties?: UserProperty[];
- /**
- * Linked service reference.
- */
- linkedServiceName?: LinkedServiceReference;
- /**
- * Activity policy.
- */
- policy?: ActivityPolicy;
- /**
- * GetMetadata activity dataset reference.
- */
- dataset: DatasetReference;
+ type: "CompressionReadSettings";
/**
- * Fields of metadata to get from dataset.
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
*/
- fieldList?: any[];
+ [property: string]: any;
}
/**
- * Web activity authentication properties.
+ * The ZipDeflate compression read settings.
*/
-export interface WebActivityAuthentication {
+export interface ZipDeflateReadSettings {
/**
- * Web activity authentication (Basic/ClientCertificate/MSI)
- */
- type: string;
- /**
- * Base64-encoded contents of a PFX file.
+ * Polymorphic Discriminator
*/
- pfx?: SecretBaseUnion;
+ type: "ZipDeflateReadSettings";
/**
- * Web activity authentication user name for basic authentication.
+ * Preserve the zip file name as folder path. Type: boolean (or Expression with resultType
+ * boolean).
*/
- username?: string;
+ preserveZipFileNameAsFolder?: any;
+}
+
+/**
+ * Contains the possible cases for FormatReadSettings.
+ */
+export type FormatReadSettingsUnion = FormatReadSettings | BinaryReadSettings | XmlReadSettings | JsonReadSettings | DelimitedTextReadSettings;
+
+/**
+ * Format read settings.
+ */
+export interface FormatReadSettings {
/**
- * Password for the PFX file or basic authentication.
+ * Polymorphic Discriminator
*/
- password?: SecretBaseUnion;
+ type: "FormatReadSettings";
/**
- * Resource for which Azure Auth token will be requested when using MSI Authentication.
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
*/
- resource?: string;
+ [property: string]: any;
}
/**
- * Web activity.
+ * Binary read settings.
*/
-export interface WebActivity {
+export interface BinaryReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "WebActivity";
- /**
- * Activity name.
- */
- name: string;
+ type: "BinaryReadSettings";
/**
- * Activity description.
+ * Compression settings.
*/
- description?: string;
+ compressionProperties?: CompressionReadSettingsUnion;
+}
+
+/**
+ * Xml read settings.
+ */
+export interface XmlReadSettings {
/**
- * Activity depends on condition.
+ * Polymorphic Discriminator
*/
- dependsOn?: ActivityDependency[];
+ type: "XmlReadSettings";
/**
- * Activity user properties.
+ * Compression settings.
*/
- userProperties?: UserProperty[];
+ compressionProperties?: CompressionReadSettingsUnion;
/**
- * Linked service reference.
+ * Indicates what validation method is used when reading the xml files. Allowed values: 'none',
+ * 'xsd', or 'dtd'. Type: string (or Expression with resultType string).
*/
- linkedServiceName?: LinkedServiceReference;
+ validationMode?: any;
/**
- * Activity policy.
+ * Namespace uri to prefix mappings to override the prefixes in column names when namespace is
+ * enabled, if no prefix is defined for a namespace uri, the prefix of xml element/attribute name
+ * in the xml data file will be used. Example: "{"http://www.example.com/xml":"prefix"}" Type:
+ * object (or Expression with resultType object).
*/
- policy?: ActivityPolicy;
+ namespacePrefixes?: any;
+}
+
+/**
+ * Json read settings.
+ */
+export interface JsonReadSettings {
/**
- * Rest API method for target endpoint. Possible values include: 'GET', 'POST', 'PUT', 'DELETE'
+ * Polymorphic Discriminator
*/
- method: WebActivityMethod;
+ type: "JsonReadSettings";
/**
- * Web activity target endpoint and path. Type: string (or Expression with resultType string).
+ * Compression settings.
*/
- url: any;
+ compressionProperties?: CompressionReadSettingsUnion;
+}
+
+/**
+ * Delimited text read settings.
+ */
+export interface DelimitedTextReadSettings {
/**
- * Represents the headers that will be sent to the request. For example, to set the language and
- * type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type":
- * "application/json" }. Type: string (or Expression with resultType string).
+ * Polymorphic Discriminator
*/
- headers?: any;
+ type: "DelimitedTextReadSettings";
/**
- * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not
- * allowed for GET method Type: string (or Expression with resultType string).
+ * Indicates the number of non-empty rows to skip when reading data from input files. Type:
+ * integer (or Expression with resultType integer).
*/
- body?: any;
+ skipLineCount?: any;
/**
- * Authentication method used for calling the endpoint.
+ * Compression settings.
*/
- authentication?: WebActivityAuthentication;
+ compressionProperties?: CompressionReadSettingsUnion;
+}
+
+/**
+ * Distcp settings.
+ */
+export interface DistcpSettings {
/**
- * List of datasets passed to web endpoint.
+ * Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType
+ * string).
*/
- datasets?: DatasetReference[];
+ resourceManagerEndpoint: any;
/**
- * List of linked services passed to web endpoint.
+ * Specifies an existing folder path which will be used to store temp Distcp command script. The
+ * script file is generated by ADF and will be removed after Copy job finished. Type: string (or
+ * Expression with resultType string).
*/
- linkedServices?: LinkedServiceReference[];
+ tempScriptPath: any;
/**
- * The integration runtime reference.
+ * Specifies the Distcp options. Type: string (or Expression with resultType string).
*/
- connectVia?: IntegrationRuntimeReference;
+ distcpOptions?: any;
}
/**
- * Contains the possible cases for CopySource.
+ * Contains the possible cases for StoreReadSettings.
*/
-export type CopySourceUnion = CopySource | HttpSource | AzureBlobFSSource | AzureDataLakeStoreSource | Office365Source | CosmosDbMongoDbApiSource | MongoDbV2Source | MongoDbSource | WebSource | OracleSource | AzureDataExplorerSource | HdfsSource | FileSystemSource | RestSource | SalesforceServiceCloudSource | ODataSource | MicrosoftAccessSource | RelationalSource | CommonDataServiceForAppsSource | DynamicsCrmSource | DynamicsSource | CosmosDbSqlApiSource | DocumentDbCollectionSource | BlobSource | TabularSourceUnion | BinarySource | OrcSource | JsonSource | DelimitedTextSource | ParquetSource | AvroSource;
+export type StoreReadSettingsUnion = StoreReadSettings | HdfsReadSettings | HttpReadSettings | SftpReadSettings | FtpReadSettings | GoogleCloudStorageReadSettings | AzureFileStorageReadSettings | FileServerReadSettings | AmazonS3ReadSettings | AzureDataLakeStoreReadSettings | AzureBlobFSReadSettings | AzureBlobStorageReadSettings;
/**
- * A copy activity source.
+ * Connector read setting.
*/
-export interface CopySource {
+export interface StoreReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "CopySource";
- /**
- * Source retry count. Type: integer (or Expression with resultType integer).
- */
- sourceRetryCount?: any;
- /**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- */
- sourceRetryWait?: any;
+ type: "StoreReadSettings";
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
@@ -13672,400 +14023,730 @@ export interface CopySource {
}
/**
- * A copy activity source for an HTTP file.
+ * HDFS read settings.
*/
-export interface HttpSource {
+export interface HdfsReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "HttpSource";
- /**
- * Source retry count. Type: integer (or Expression with resultType integer).
- */
- sourceRetryCount?: any;
- /**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- */
- sourceRetryWait?: any;
+ type: "HdfsReadSettings";
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
/**
- * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default
- * value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with
- * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- httpRequestTimeout?: any;
-}
-
-/**
- * A copy activity Azure BlobFS source.
- */
-export interface AzureBlobFSSource {
+ recursive?: any;
/**
- * Polymorphic Discriminator
+ * HDFS wildcardFolderPath. Type: string (or Expression with resultType string).
*/
- type: "AzureBlobFSSource";
+ wildcardFolderPath?: any;
/**
- * Source retry count. Type: integer (or Expression with resultType integer).
+ * HDFS wildcardFileName. Type: string (or Expression with resultType string).
*/
- sourceRetryCount?: any;
+ wildcardFileName?: any;
/**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
*/
- sourceRetryWait?: any;
+ fileListPath?: any;
/**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
+ * Indicates whether to enable partition discovery.
*/
- maxConcurrentConnections?: any;
+ enablePartitionDiscovery?: boolean;
/**
- * Treat empty as null. Type: boolean (or Expression with resultType boolean).
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
*/
- treatEmptyAsNull?: any;
+ partitionRootPath?: any;
/**
- * Number of header lines to skip from each blob. Type: integer (or Expression with resultType
- * integer).
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
*/
- skipHeaderLineCount?: any;
+ modifiedDatetimeStart?: any;
/**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
*/
- recursive?: any;
+ modifiedDatetimeEnd?: any;
+ /**
+ * Specifies Distcp-related settings.
+ */
+ distcpSettings?: DistcpSettings;
}
/**
- * A copy activity Azure Data Lake source.
+ * Http read settings.
*/
-export interface AzureDataLakeStoreSource {
+export interface HttpReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "AzureDataLakeStoreSource";
+ type: "HttpReadSettings";
/**
- * Source retry count. Type: integer (or Expression with resultType integer).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- sourceRetryCount?: any;
+ maxConcurrentConnections?: any;
/**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression
+ * with resultType string).
*/
- sourceRetryWait?: any;
+ requestMethod?: any;
/**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
+ * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression
+ * with resultType string).
*/
- maxConcurrentConnections?: any;
+ requestBody?: any;
/**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
+ * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression
+ * with resultType string).
*/
- recursive?: any;
+ additionalHeaders?: any;
+ /**
+ * Specifies the timeout for a HTTP client to get HTTP response from HTTP server.
+ */
+ requestTimeout?: any;
+ /**
+ * Indicates whether to enable partition discovery.
+ */
+ enablePartitionDiscovery?: boolean;
+ /**
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
+ */
+ partitionRootPath?: any;
}
/**
- * A copy activity source for an Office 365 service.
+ * Sftp read settings.
*/
-export interface Office365Source {
+export interface SftpReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "Office365Source";
- /**
- * Source retry count. Type: integer (or Expression with resultType integer).
- */
- sourceRetryCount?: any;
- /**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- */
- sourceRetryWait?: any;
+ type: "SftpReadSettings";
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
/**
- * The groups containing all the users. Type: array of strings (or Expression with resultType
- * array of strings).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- allowedGroups?: any;
+ recursive?: any;
/**
- * The user scope uri. Type: string (or Expression with resultType string).
+ * Sftp wildcardFolderPath. Type: string (or Expression with resultType string).
*/
- userScopeFilterUri?: any;
+ wildcardFolderPath?: any;
/**
- * The Column to apply the and . Type:
- * string (or Expression with resultType string).
+ * Sftp wildcardFileName. Type: string (or Expression with resultType string).
*/
- dateFilterColumn?: any;
+ wildcardFileName?: any;
/**
- * Start time of the requested range for this dataset. Type: string (or Expression with
+ * Indicates whether to enable partition discovery.
+ */
+ enablePartitionDiscovery?: boolean;
+ /**
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
* resultType string).
*/
- startTime?: any;
+ partitionRootPath?: any;
/**
- * End time of the requested range for this dataset. Type: string (or Expression with resultType
- * string).
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
*/
- endTime?: any;
+ fileListPath?: any;
/**
- * The columns to be read out from the Office 365 table. Type: array of objects (or Expression
- * with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" }
- * ]
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
*/
- outputColumns?: any;
+ modifiedDatetimeStart?: any;
+ /**
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeEnd?: any;
}
/**
- * Specify the column name and value of additional columns.
+ * Ftp read settings.
*/
-export interface AdditionalColumns {
+export interface FtpReadSettings {
/**
- * Additional column name. Type: string (or Expression with resultType string).
+ * Polymorphic Discriminator
*/
- name?: any;
+ type: "FtpReadSettings";
/**
- * Additional column value. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- value?: any;
-}
-
-/**
- * Cursor methods for Mongodb query
- */
-export interface MongoDbCursorMethodsProperties {
+ maxConcurrentConnections?: any;
/**
- * Specifies the fields to return in the documents that match the query filter. To return all
- * fields in the matching documents, omit this parameter. Type: string (or Expression with
- * resultType string).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- project?: any;
+ recursive?: any;
/**
- * Specifies the order in which the query returns matching documents. Type: string (or Expression
- * with resultType string). Type: string (or Expression with resultType string).
+ * Ftp wildcardFolderPath. Type: string (or Expression with resultType string).
*/
- sort?: any;
+ wildcardFolderPath?: any;
/**
- * Specifies the how many documents skipped and where MongoDB begins returning results. This
- * approach may be useful in implementing paginated results. Type: integer (or Expression with
- * resultType integer).
+ * Ftp wildcardFileName. Type: string (or Expression with resultType string).
*/
- skip?: any;
+ wildcardFileName?: any;
/**
- * Specifies the maximum number of documents the server returns. limit() is analogous to the
- * LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer).
+ * Indicates whether to enable partition discovery.
*/
- limit?: any;
+ enablePartitionDiscovery?: boolean;
/**
- * Describes unknown properties. The value of an unknown property can be of "any" type.
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
*/
- [property: string]: any;
+ partitionRootPath?: any;
+ /**
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ */
+ fileListPath?: any;
+ /**
+ * Specify whether to use binary transfer mode for FTP stores.
+ */
+ useBinaryTransfer?: boolean;
}
/**
- * A copy activity source for a CosmosDB (MongoDB API) database.
+ * Google Cloud Storage read settings.
*/
-export interface CosmosDbMongoDbApiSource {
+export interface GoogleCloudStorageReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "CosmosDbMongoDbApiSource";
+ type: "GoogleCloudStorageReadSettings";
/**
- * Source retry count. Type: integer (or Expression with resultType integer).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- sourceRetryCount?: any;
+ maxConcurrentConnections?: any;
/**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- sourceRetryWait?: any;
+ recursive?: any;
/**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
+ * Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string).
*/
- maxConcurrentConnections?: any;
+ wildcardFolderPath?: any;
/**
- * Specifies selection filter using query operators. To return all documents in a collection,
- * omit this parameter or pass an empty document ({}). Type: string (or Expression with
+ * Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string).
+ */
+ wildcardFileName?: any;
+ /**
+ * The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with
* resultType string).
*/
- filter?: any;
+ prefix?: any;
/**
- * Cursor methods for Mongodb query.
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
*/
- cursorMethods?: MongoDbCursorMethodsProperties;
+ fileListPath?: any;
/**
- * Specifies the number of documents to return in each batch of the response from MongoDB
- * instance. In most cases, modifying the batch size will not affect the user or the application.
- * This property's main purpose is to avoid hit the limitation of response size. Type: integer
- * (or Expression with resultType integer).
+ * Indicates whether to enable partition discovery.
*/
- batchSize?: any;
+ enablePartitionDiscovery?: boolean;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
*/
- queryTimeout?: any;
+ partitionRootPath?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
*/
- additionalColumns?: AdditionalColumns[];
+ modifiedDatetimeStart?: any;
+ /**
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeEnd?: any;
}
/**
- * A copy activity source for a MongoDB database.
+ * Azure File Storage read settings.
*/
-export interface MongoDbV2Source {
+export interface AzureFileStorageReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "MongoDbV2Source";
- /**
- * Source retry count. Type: integer (or Expression with resultType integer).
- */
- sourceRetryCount?: any;
- /**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- */
- sourceRetryWait?: any;
+ type: "AzureFileStorageReadSettings";
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
/**
- * Specifies selection filter using query operators. To return all documents in a collection,
- * omit this parameter or pass an empty document ({}). Type: string (or Expression with
- * resultType string).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- filter?: any;
+ recursive?: any;
/**
- * Cursor methods for Mongodb query
+ * Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string).
*/
- cursorMethods?: MongoDbCursorMethodsProperties;
+ wildcardFolderPath?: any;
/**
- * Specifies the number of documents to return in each batch of the response from MongoDB
- * instance. In most cases, modifying the batch size will not affect the user or the application.
- * This property's main purpose is to avoid hit the limitation of response size. Type: integer
- * (or Expression with resultType integer).
+ * Azure File Storage wildcardFileName. Type: string (or Expression with resultType string).
*/
- batchSize?: any;
+ wildcardFileName?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * The prefix filter for the Azure File name starting from root path. Type: string (or Expression
+ * with resultType string).
*/
- queryTimeout?: any;
+ prefix?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
*/
- additionalColumns?: AdditionalColumns[];
+ fileListPath?: any;
+ /**
+ * Indicates whether to enable partition discovery.
+ */
+ enablePartitionDiscovery?: boolean;
+ /**
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
+ */
+ partitionRootPath?: any;
+ /**
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeStart?: any;
+ /**
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeEnd?: any;
}
/**
- * A copy activity source for a MongoDB database.
+ * File server read settings.
*/
-export interface MongoDbSource {
+export interface FileServerReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "MongoDbSource";
+ type: "FileServerReadSettings";
/**
- * Source retry count. Type: integer (or Expression with resultType integer).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- sourceRetryCount?: any;
+ maxConcurrentConnections?: any;
/**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- sourceRetryWait?: any;
+ recursive?: any;
+ /**
+ * FileServer wildcardFolderPath. Type: string (or Expression with resultType string).
+ */
+ wildcardFolderPath?: any;
+ /**
+ * FileServer wildcardFileName. Type: string (or Expression with resultType string).
+ */
+ wildcardFileName?: any;
+ /**
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ */
+ fileListPath?: any;
+ /**
+ * Indicates whether to enable partition discovery.
+ */
+ enablePartitionDiscovery?: boolean;
+ /**
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
+ */
+ partitionRootPath?: any;
+ /**
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeStart?: any;
+ /**
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeEnd?: any;
+ /**
+ * Specify a filter to be used to select a subset of files in the folderPath rather than all
+ * files. Type: string (or Expression with resultType string).
+ */
+ fileFilter?: any;
+}
+
+/**
+ * Amazon S3 read settings.
+ */
+export interface AmazonS3ReadSettings {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "AmazonS3ReadSettings";
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
/**
- * Database query. Should be a SQL-92 query expression. Type: string (or Expression with
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
+ */
+ recursive?: any;
+ /**
+ * AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string).
+ */
+ wildcardFolderPath?: any;
+ /**
+ * AmazonS3 wildcardFileName. Type: string (or Expression with resultType string).
+ */
+ wildcardFileName?: any;
+ /**
+ * The prefix filter for the S3 object name. Type: string (or Expression with resultType string).
+ */
+ prefix?: any;
+ /**
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ */
+ fileListPath?: any;
+ /**
+ * Indicates whether to enable partition discovery.
+ */
+ enablePartitionDiscovery?: boolean;
+ /**
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
* resultType string).
*/
- query?: any;
+ partitionRootPath?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
*/
- additionalColumns?: AdditionalColumns[];
+ modifiedDatetimeStart?: any;
+ /**
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeEnd?: any;
}
/**
- * A copy activity source for web page table.
+ * Azure data lake store read settings.
*/
-export interface WebSource {
+export interface AzureDataLakeStoreReadSettings {
/**
* Polymorphic Discriminator
*/
- type: "WebSource";
+ type: "AzureDataLakeStoreReadSettings";
/**
- * Source retry count. Type: integer (or Expression with resultType integer).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- sourceRetryCount?: any;
+ maxConcurrentConnections?: any;
/**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- sourceRetryWait?: any;
+ recursive?: any;
+ /**
+ * ADLS wildcardFolderPath. Type: string (or Expression with resultType string).
+ */
+ wildcardFolderPath?: any;
+ /**
+ * ADLS wildcardFileName. Type: string (or Expression with resultType string).
+ */
+ wildcardFileName?: any;
+ /**
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ */
+ fileListPath?: any;
+ /**
+ * Indicates whether to enable partition discovery.
+ */
+ enablePartitionDiscovery?: boolean;
+ /**
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
+ */
+ partitionRootPath?: any;
+ /**
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeStart?: any;
+ /**
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeEnd?: any;
+}
+
+/**
+ * Azure blobFS read settings.
+ */
+export interface AzureBlobFSReadSettings {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "AzureBlobFSReadSettings";
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- additionalColumns?: AdditionalColumns[];
+ recursive?: any;
+ /**
+ * Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string).
+ */
+ wildcardFolderPath?: any;
+ /**
+ * Azure blobFS wildcardFileName. Type: string (or Expression with resultType string).
+ */
+ wildcardFileName?: any;
+ /**
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ */
+ fileListPath?: any;
+ /**
+ * Indicates whether to enable partition discovery.
+ */
+ enablePartitionDiscovery?: boolean;
+ /**
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
+ */
+ partitionRootPath?: any;
+ /**
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeStart?: any;
+ /**
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeEnd?: any;
}
/**
- * The settings that will be leveraged for Oracle source partitioning.
+ * Azure blob read settings.
*/
-export interface OraclePartitionSettings {
+export interface AzureBlobStorageReadSettings {
/**
- * Names of the physical partitions of Oracle table.
+ * Polymorphic Discriminator
*/
- partitionNames?: any;
+ type: "AzureBlobStorageReadSettings";
/**
- * The name of the column in integer type that will be used for proceeding range partitioning.
- * Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- partitionColumnName?: any;
+ maxConcurrentConnections?: any;
/**
- * The maximum value of column specified in partitionColumnName that will be used for proceeding
- * range partitioning. Type: string (or Expression with resultType string).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
+ */
+ recursive?: any;
+ /**
+ * Azure blob wildcardFolderPath. Type: string (or Expression with resultType string).
+ */
+ wildcardFolderPath?: any;
+ /**
+ * Azure blob wildcardFileName. Type: string (or Expression with resultType string).
+ */
+ wildcardFileName?: any;
+ /**
+ * The prefix filter for the Azure Blob name. Type: string (or Expression with resultType
+ * string).
+ */
+ prefix?: any;
+ /**
+ * Point to a text file that lists each file (relative path to the path configured in the
+ * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ */
+ fileListPath?: any;
+ /**
+ * Indicates whether to enable partition discovery.
+ */
+ enablePartitionDiscovery?: boolean;
+ /**
+ * Specify the root path where partition discovery starts from. Type: string (or Expression with
+ * resultType string).
+ */
+ partitionRootPath?: any;
+ /**
+ * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeStart?: any;
+ /**
+ * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ */
+ modifiedDatetimeEnd?: any;
+}
+
+/**
+ * Activity to get metadata of dataset
+ */
+export interface GetMetadataActivity {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "GetMetadata";
+ /**
+ * Activity name.
+ */
+ name: string;
+ /**
+ * Activity description.
+ */
+ description?: string;
+ /**
+ * Activity depends on condition.
+ */
+ dependsOn?: ActivityDependency[];
+ /**
+ * Activity user properties.
+ */
+ userProperties?: UserProperty[];
+ /**
+ * Linked service reference.
+ */
+ linkedServiceName?: LinkedServiceReference;
+ /**
+ * Activity policy.
+ */
+ policy?: ActivityPolicy;
+ /**
+ * GetMetadata activity dataset reference.
+ */
+ dataset: DatasetReference;
+ /**
+ * Fields of metadata to get from dataset.
+ */
+ fieldList?: any[];
+ /**
+ * GetMetadata activity store settings.
+ */
+ storeSettings?: StoreReadSettingsUnion;
+ /**
+ * GetMetadata activity format settings.
+ */
+ formatSettings?: FormatReadSettingsUnion;
+}
+
+/**
+ * Web activity authentication properties.
+ */
+export interface WebActivityAuthentication {
+ /**
+ * Web activity authentication (Basic/ClientCertificate/MSI)
+ */
+ type: string;
+ /**
+ * Base64-encoded contents of a PFX file.
+ */
+ pfx?: SecretBaseUnion;
+ /**
+ * Web activity authentication user name for basic authentication.
+ */
+ username?: string;
+ /**
+ * Password for the PFX file or basic authentication.
+ */
+ password?: SecretBaseUnion;
+ /**
+ * Resource for which Azure Auth token will be requested when using MSI Authentication.
+ */
+ resource?: string;
+}
+
+/**
+ * Web activity.
+ */
+export interface WebActivity {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "WebActivity";
+ /**
+ * Activity name.
+ */
+ name: string;
+ /**
+ * Activity description.
+ */
+ description?: string;
+ /**
+ * Activity depends on condition.
+ */
+ dependsOn?: ActivityDependency[];
+ /**
+ * Activity user properties.
+ */
+ userProperties?: UserProperty[];
+ /**
+ * Linked service reference.
+ */
+ linkedServiceName?: LinkedServiceReference;
+ /**
+ * Activity policy.
+ */
+ policy?: ActivityPolicy;
+ /**
+ * Rest API method for target endpoint. Possible values include: 'GET', 'POST', 'PUT', 'DELETE'
+ */
+ method: WebActivityMethod;
+ /**
+ * Web activity target endpoint and path. Type: string (or Expression with resultType string).
+ */
+ url: any;
+ /**
+ * Represents the headers that will be sent to the request. For example, to set the language and
+ * type on a request: "headers" : { "Accept-Language": "en-us", "Content-Type":
+ * "application/json" }. Type: string (or Expression with resultType string).
+ */
+ headers?: any;
+ /**
+ * Represents the payload that will be sent to the endpoint. Required for POST/PUT method, not
+ * allowed for GET method Type: string (or Expression with resultType string).
+ */
+ body?: any;
+ /**
+ * Authentication method used for calling the endpoint.
+ */
+ authentication?: WebActivityAuthentication;
+ /**
+ * List of datasets passed to web endpoint.
+ */
+ datasets?: DatasetReference[];
+ /**
+ * List of linked services passed to web endpoint.
*/
- partitionUpperBound?: any;
+ linkedServices?: LinkedServiceReference[];
/**
- * The minimum value of column specified in partitionColumnName that will be used for proceeding
- * range partitioning. Type: string (or Expression with resultType string).
+ * The integration runtime reference.
*/
- partitionLowerBound?: any;
+ connectVia?: IntegrationRuntimeReference;
}
/**
- * A copy activity Oracle source.
+ * Contains the possible cases for CopySource.
*/
-export interface OracleSource {
+export type CopySourceUnion = CopySource | SharePointOnlineListSource | SnowflakeSource | HttpSource | AzureBlobFSSource | AzureDataLakeStoreSource | Office365Source | CosmosDbMongoDbApiSource | MongoDbV2Source | MongoDbSource | WebSource | OracleSource | AzureDataExplorerSource | HdfsSource | FileSystemSource | RestSource | SalesforceServiceCloudSource | ODataSource | MicrosoftAccessSource | RelationalSource | CommonDataServiceForAppsSource | DynamicsCrmSource | DynamicsSource | CosmosDbSqlApiSource | DocumentDbCollectionSource | BlobSource | TabularSourceUnion | BinarySource | OrcSource | XmlSource | JsonSource | DelimitedTextSource | ParquetSource | ExcelSource | AvroSource;
+
+/**
+ * A copy activity source.
+ */
+export interface CopySource {
/**
* Polymorphic Discriminator
*/
- type: "OracleSource";
+ type: "CopySource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14081,38 +14762,19 @@ export interface OracleSource {
*/
maxConcurrentConnections?: any;
/**
- * Oracle reader query. Type: string (or Expression with resultType string).
- */
- oracleReaderQuery?: any;
- /**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- */
- queryTimeout?: any;
- /**
- * The partition mechanism that will be used for Oracle read in parallel. Possible values
- * include: 'None', 'PhysicalPartitionsOfTable', 'DynamicRange'
- */
- partitionOption?: OraclePartitionOption;
- /**
- * The settings that will be leveraged for Oracle source partitioning.
- */
- partitionSettings?: OraclePartitionSettings;
- /**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
*/
- additionalColumns?: AdditionalColumns[];
+ [property: string]: any;
}
/**
- * A copy activity Azure Data Explorer (Kusto) source.
+ * A copy activity source for sharePoint online list source.
*/
-export interface AzureDataExplorerSource {
+export interface SharePointOnlineListSource {
/**
* Polymorphic Discriminator
*/
- type: "AzureDataExplorerSource";
+ type: "SharePointOnlineListSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14128,56 +14790,67 @@ export interface AzureDataExplorerSource {
*/
maxConcurrentConnections?: any;
/**
- * Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with
- * resultType string).
+ * The OData query to filter the data in SharePoint Online list. For example, "$top=1". Type:
+ * string (or Expression with resultType string).
*/
- query: any;
+ query?: any;
/**
- * The name of the Boolean option that controls whether truncation is applied to result-sets that
- * go beyond a certain row-count limit.
+ * The wait time to get a response from SharePoint Online. Default value is 5 minutes (00:05:00).
+ * Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- noTruncation?: any;
+ httpRequestTimeout?: any;
+}
+
+/**
+ * Contains the possible cases for ExportSettings.
+ */
+export type ExportSettingsUnion = ExportSettings | SnowflakeExportCopyCommand;
+
+/**
+ * Export command settings.
+ */
+export interface ExportSettings {
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))..
+ * Polymorphic Discriminator
*/
- queryTimeout?: any;
+ type: "ExportSettings";
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
*/
- additionalColumns?: AdditionalColumns[];
+ [property: string]: any;
}
/**
- * Distcp settings.
+ * Snowflake export command settings.
*/
-export interface DistcpSettings {
+export interface SnowflakeExportCopyCommand {
/**
- * Specifies the Yarn ResourceManager endpoint. Type: string (or Expression with resultType
- * string).
+ * Polymorphic Discriminator
*/
- resourceManagerEndpoint: any;
+ type: "SnowflakeExportCopyCommand";
/**
- * Specifies an existing folder path which will be used to store temp Distcp command script. The
- * script file is generated by ADF and will be removed after Copy job finished. Type: string (or
- * Expression with resultType string).
+ * Additional copy options directly passed to snowflake Copy Command. Type: key value pairs
+ * (value should be string type) (or Expression with resultType object). Example:
+ * "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }
*/
- tempScriptPath: any;
+ additionalCopyOptions?: { [propertyName: string]: any };
/**
- * Specifies the Distcp options. Type: string (or Expression with resultType string).
+ * Additional format options directly passed to snowflake Copy Command. Type: key value pairs
+ * (value should be string type) (or Expression with resultType object). Example:
+ * "additionalFormatOptions": { "OVERWRITE": "TRUE", "MAX_FILE_SIZE": "'FALSE'" }
*/
- distcpOptions?: any;
+ additionalFormatOptions?: { [propertyName: string]: any };
}
/**
- * A copy activity HDFS source.
+ * A copy activity snowflake source.
*/
-export interface HdfsSource {
+export interface SnowflakeSource {
/**
* Polymorphic Discriminator
*/
- type: "HdfsSource";
+ type: "SnowflakeSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14193,24 +14866,23 @@ export interface HdfsSource {
*/
maxConcurrentConnections?: any;
/**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
+ * Snowflake Sql query. Type: string (or Expression with resultType string).
*/
- recursive?: any;
+ query?: any;
/**
- * Specifies Distcp-related settings.
+ * Snowflake export settings.
*/
- distcpSettings?: DistcpSettings;
+ exportSettings?: SnowflakeExportCopyCommand;
}
/**
- * A copy activity file system source.
+ * A copy activity source for an HTTP file.
*/
-export interface FileSystemSource {
+export interface HttpSource {
/**
* Polymorphic Discriminator
*/
- type: "FileSystemSource";
+ type: "HttpSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14226,25 +14898,21 @@ export interface FileSystemSource {
*/
maxConcurrentConnections?: any;
/**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- recursive?: any;
- /**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * Specifies the timeout for a HTTP client to get HTTP response from HTTP server. The default
+ * value is equivalent to System.Net.HttpWebRequest.Timeout. Type: string (or Expression with
+ * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- additionalColumns?: AdditionalColumns[];
+ httpRequestTimeout?: any;
}
/**
- * A copy activity Rest service source.
+ * A copy activity Azure BlobFS source.
*/
-export interface RestSource {
+export interface AzureBlobFSSource {
/**
* Polymorphic Discriminator
*/
- type: "RestSource";
+ type: "AzureBlobFSSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14260,50 +14928,29 @@ export interface RestSource {
*/
maxConcurrentConnections?: any;
/**
- * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression
- * with resultType string).
- */
- requestMethod?: any;
- /**
- * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression
- * with resultType string).
- */
- requestBody?: any;
- /**
- * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression
- * with resultType string).
- */
- additionalHeaders?: any;
- /**
- * The pagination rules to compose next page requests. Type: string (or Expression with
- * resultType string).
- */
- paginationRules?: any;
- /**
- * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the
- * timeout to read response data. Default value: 00:01:40. Type: string (or Expression with
- * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Treat empty as null. Type: boolean (or Expression with resultType boolean).
*/
- httpRequestTimeout?: any;
+ treatEmptyAsNull?: any;
/**
- * The time to await before sending next page request.
+ * Number of header lines to skip from each blob. Type: integer (or Expression with resultType
+ * integer).
*/
- requestInterval?: any;
+ skipHeaderLineCount?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- additionalColumns?: AdditionalColumns[];
+ recursive?: any;
}
/**
- * A copy activity Salesforce Service Cloud source.
+ * A copy activity Azure Data Lake source.
*/
-export interface SalesforceServiceCloudSource {
+export interface AzureDataLakeStoreSource {
/**
* Polymorphic Discriminator
*/
- type: "SalesforceServiceCloudSource";
+ type: "AzureDataLakeStoreSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14319,29 +14966,20 @@ export interface SalesforceServiceCloudSource {
*/
maxConcurrentConnections?: any;
/**
- * Database query. Type: string (or Expression with resultType string).
- */
- query?: any;
- /**
- * The read behavior for the operation. Default is Query. Possible values include: 'Query',
- * 'QueryAll'
- */
- readBehavior?: SalesforceSourceReadBehavior;
- /**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- additionalColumns?: AdditionalColumns[];
+ recursive?: any;
}
/**
- * A copy activity source for OData source.
+ * A copy activity source for an Office 365 service.
*/
-export interface ODataSource {
+export interface Office365Source {
/**
* Polymorphic Discriminator
*/
- type: "ODataSource";
+ type: "Office365Source";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14357,57 +14995,91 @@ export interface ODataSource {
*/
maxConcurrentConnections?: any;
/**
- * OData query. For example, "$top=1". Type: string (or Expression with resultType string).
+ * The groups containing all the users. Type: array of strings (or Expression with resultType
+ * array of strings).
*/
- query?: any;
+ allowedGroups?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * The user scope uri. Type: string (or Expression with resultType string).
*/
- additionalColumns?: AdditionalColumns[];
+ userScopeFilterUri?: any;
+ /**
+   * The Column to apply the StartTime and EndTime filters on. Type:
+   * string (or Expression with resultType string).
+ */
+ dateFilterColumn?: any;
+ /**
+ * Start time of the requested range for this dataset. Type: string (or Expression with
+ * resultType string).
+ */
+ startTime?: any;
+ /**
+ * End time of the requested range for this dataset. Type: string (or Expression with resultType
+ * string).
+ */
+ endTime?: any;
+ /**
+ * The columns to be read out from the Office 365 table. Type: array of objects (or Expression
+ * with resultType array of objects). Example: [ { "name": "Id" }, { "name": "CreatedDateTime" }
+ * ]
+ */
+ outputColumns?: any;
}
/**
- * A copy activity source for Microsoft Access.
+ * Specify the column name and value of additional columns.
*/
-export interface MicrosoftAccessSource {
+export interface AdditionalColumns {
/**
- * Polymorphic Discriminator
+ * Additional column name. Type: string (or Expression with resultType string).
*/
- type: "MicrosoftAccessSource";
+ name?: any;
/**
- * Source retry count. Type: integer (or Expression with resultType integer).
+ * Additional column value. Type: string (or Expression with resultType string).
*/
- sourceRetryCount?: any;
+ value?: any;
+}
+
+/**
+ * Cursor methods for Mongodb query
+ */
+export interface MongoDbCursorMethodsProperties {
/**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Specifies the fields to return in the documents that match the query filter. To return all
+ * fields in the matching documents, omit this parameter. Type: string (or Expression with
+ * resultType string).
*/
- sourceRetryWait?: any;
+ project?: any;
/**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
+ * Specifies the order in which the query returns matching documents. Type: string (or Expression
+   * with resultType string).
*/
- maxConcurrentConnections?: any;
+ sort?: any;
/**
- * Database query. Type: string (or Expression with resultType string).
+   * Specifies how many documents are skipped and where MongoDB begins returning results. This
+ * approach may be useful in implementing paginated results. Type: integer (or Expression with
+ * resultType integer).
*/
- query?: any;
+ skip?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * Specifies the maximum number of documents the server returns. limit() is analogous to the
+ * LIMIT statement in a SQL database. Type: integer (or Expression with resultType integer).
*/
- additionalColumns?: AdditionalColumns[];
+ limit?: any;
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
}
/**
- * A copy activity source for various relational databases.
+ * A copy activity source for a CosmosDB (MongoDB API) database.
*/
-export interface RelationalSource {
+export interface CosmosDbMongoDbApiSource {
/**
* Polymorphic Discriminator
*/
- type: "RelationalSource";
+ type: "CosmosDbMongoDbApiSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14423,9 +15095,27 @@ export interface RelationalSource {
*/
maxConcurrentConnections?: any;
/**
- * Database query. Type: string (or Expression with resultType string).
+ * Specifies selection filter using query operators. To return all documents in a collection,
+ * omit this parameter or pass an empty document ({}). Type: string (or Expression with
+ * resultType string).
*/
- query?: any;
+ filter?: any;
+ /**
+ * Cursor methods for Mongodb query.
+ */
+ cursorMethods?: MongoDbCursorMethodsProperties;
+ /**
+ * Specifies the number of documents to return in each batch of the response from MongoDB
+ * instance. In most cases, modifying the batch size will not affect the user or the application.
+   * This property's main purpose is to avoid hitting the limitation of response size. Type: integer
+ * (or Expression with resultType integer).
+ */
+ batchSize?: any;
+ /**
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ queryTimeout?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
@@ -14434,13 +15124,13 @@ export interface RelationalSource {
}
/**
- * A copy activity Common Data Service for Apps source.
+ * A copy activity source for a MongoDB database.
*/
-export interface CommonDataServiceForAppsSource {
+export interface MongoDbV2Source {
/**
* Polymorphic Discriminator
*/
- type: "CommonDataServiceForAppsSource";
+ type: "MongoDbV2Source";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14456,10 +15146,27 @@ export interface CommonDataServiceForAppsSource {
*/
maxConcurrentConnections?: any;
/**
- * FetchXML is a proprietary query language that is used in Microsoft Common Data Service for
- * Apps (online & on-premises). Type: string (or Expression with resultType string).
+ * Specifies selection filter using query operators. To return all documents in a collection,
+ * omit this parameter or pass an empty document ({}). Type: string (or Expression with
+ * resultType string).
*/
- query?: any;
+ filter?: any;
+ /**
+ * Cursor methods for Mongodb query
+ */
+ cursorMethods?: MongoDbCursorMethodsProperties;
+ /**
+ * Specifies the number of documents to return in each batch of the response from MongoDB
+ * instance. In most cases, modifying the batch size will not affect the user or the application.
+   * This property's main purpose is to avoid hitting the limitation of response size. Type: integer
+ * (or Expression with resultType integer).
+ */
+ batchSize?: any;
+ /**
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ queryTimeout?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
@@ -14468,13 +15175,13 @@ export interface CommonDataServiceForAppsSource {
}
/**
- * A copy activity Dynamics CRM source.
+ * A copy activity source for a MongoDB database.
*/
-export interface DynamicsCrmSource {
+export interface MongoDbSource {
/**
* Polymorphic Discriminator
*/
- type: "DynamicsCrmSource";
+ type: "MongoDbSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14490,8 +15197,8 @@ export interface DynamicsCrmSource {
*/
maxConcurrentConnections?: any;
/**
- * FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online &
- * on-premises). Type: string (or Expression with resultType string).
+ * Database query. Should be a SQL-92 query expression. Type: string (or Expression with
+ * resultType string).
*/
query?: any;
/**
@@ -14502,13 +15209,13 @@ export interface DynamicsCrmSource {
}
/**
- * A copy activity Dynamics source.
+ * A copy activity source for web page table.
*/
-export interface DynamicsSource {
+export interface WebSource {
/**
* Polymorphic Discriminator
*/
- type: "DynamicsSource";
+ type: "WebSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14523,11 +15230,6 @@ export interface DynamicsSource {
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
- /**
- * FetchXML is a proprietary query language that is used in Microsoft Dynamics (online &
- * on-premises). Type: string (or Expression with resultType string).
- */
- query?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
@@ -14536,13 +15238,38 @@ export interface DynamicsSource {
}
/**
- * A copy activity Azure CosmosDB (SQL API) Collection source.
+ * The settings that will be leveraged for Oracle source partitioning.
*/
-export interface CosmosDbSqlApiSource {
+export interface OraclePartitionSettings {
+ /**
+ * Names of the physical partitions of Oracle table.
+ */
+ partitionNames?: any;
+ /**
+ * The name of the column in integer type that will be used for proceeding range partitioning.
+ * Type: string (or Expression with resultType string).
+ */
+ partitionColumnName?: any;
+ /**
+ * The maximum value of column specified in partitionColumnName that will be used for proceeding
+ * range partitioning. Type: string (or Expression with resultType string).
+ */
+ partitionUpperBound?: any;
+ /**
+ * The minimum value of column specified in partitionColumnName that will be used for proceeding
+ * range partitioning. Type: string (or Expression with resultType string).
+ */
+ partitionLowerBound?: any;
+}
+
+/**
+ * A copy activity Oracle source.
+ */
+export interface OracleSource {
/**
* Polymorphic Discriminator
*/
- type: "CosmosDbSqlApiSource";
+ type: "OracleSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14558,17 +15285,23 @@ export interface CosmosDbSqlApiSource {
*/
maxConcurrentConnections?: any;
/**
- * SQL API query. Type: string (or Expression with resultType string).
+ * Oracle reader query. Type: string (or Expression with resultType string).
*/
- query?: any;
+ oracleReaderQuery?: any;
/**
- * Page size of the result. Type: integer (or Expression with resultType integer).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- pageSize?: any;
+ queryTimeout?: any;
/**
- * Preferred regions. Type: array of strings (or Expression with resultType array of strings).
+ * The partition mechanism that will be used for Oracle read in parallel. Possible values
+ * include: 'None', 'PhysicalPartitionsOfTable', 'DynamicRange'
*/
- preferredRegions?: any;
+ partitionOption?: OraclePartitionOption;
+ /**
+ * The settings that will be leveraged for Oracle source partitioning.
+ */
+ partitionSettings?: OraclePartitionSettings;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
@@ -14577,13 +15310,13 @@ export interface CosmosDbSqlApiSource {
}
/**
- * A copy activity Document Database Collection source.
+ * A copy activity Azure Data Explorer (Kusto) source.
*/
-export interface DocumentDbCollectionSource {
+export interface AzureDataExplorerSource {
/**
* Polymorphic Discriminator
*/
- type: "DocumentDbCollectionSource";
+ type: "AzureDataExplorerSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14599,16 +15332,18 @@ export interface DocumentDbCollectionSource {
*/
maxConcurrentConnections?: any;
/**
- * Documents query. Type: string (or Expression with resultType string).
+ * Database query. Should be a Kusto Query Language (KQL) query. Type: string (or Expression with
+ * resultType string).
*/
- query?: any;
+ query: any;
/**
- * Nested properties separator. Type: string (or Expression with resultType string).
+ * The name of the Boolean option that controls whether truncation is applied to result-sets that
+ * go beyond a certain row-count limit.
*/
- nestingSeparator?: any;
+ noTruncation?: any;
/**
* Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+   * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
queryTimeout?: any;
/**
@@ -14619,13 +15354,13 @@ export interface DocumentDbCollectionSource {
}
/**
- * A copy activity Azure Blob source.
+ * A copy activity HDFS source.
*/
-export interface BlobSource {
+export interface HdfsSource {
/**
* Polymorphic Discriminator
*/
- type: "BlobSource";
+ type: "HdfsSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14640,54 +15375,25 @@ export interface BlobSource {
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
- /**
- * Treat empty as null. Type: boolean (or Expression with resultType boolean).
- */
- treatEmptyAsNull?: any;
- /**
- * Number of header lines to skip from each blob. Type: integer (or Expression with resultType
- * integer).
- */
- skipHeaderLineCount?: any;
/**
* If true, files under the folder path will be read recursively. Default is true. Type: boolean
* (or Expression with resultType boolean).
*/
recursive?: any;
-}
-
-/**
- * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with
- * unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then
- * copied into the targeted sink from the interim S3.
- */
-export interface RedshiftUnloadSettings {
- /**
- * The name of the Amazon S3 linked service which will be used for the unload operation when
- * copying from the Amazon Redshift source.
- */
- s3LinkedServiceName: LinkedServiceReference;
/**
- * The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon
- * Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type:
- * string (or Expression with resultType string).
+ * Specifies Distcp-related settings.
*/
- bucketName: any;
+ distcpSettings?: DistcpSettings;
}
/**
- * Contains the possible cases for TabularSource.
- */
-export type TabularSourceUnion = TabularSource | AmazonRedshiftSource | GoogleAdWordsSource | OracleServiceCloudSource | DynamicsAXSource | ResponsysSource | SalesforceMarketingCloudSource | VerticaSource | NetezzaSource | ZohoSource | XeroSource | SquareSource | SparkSource | ShopifySource | ServiceNowSource | QuickBooksSource | PrestoSource | PhoenixSource | PaypalSource | MarketoSource | AzureMariaDBSource | MariaDBSource | MagentoSource | JiraSource | ImpalaSource | HubspotSource | HiveSource | HBaseSource | GreenplumSource | GoogleBigQuerySource | EloquaSource | DrillSource | CouchbaseSource | ConcurSource | AzurePostgreSqlSource | AmazonMWSSource | CassandraSource | TeradataSource | AzureMySqlSource | SqlDWSource | SqlMISource | AzureSqlSource | SqlServerSource | SqlSource | SapTableSource | SapOpenHubSource | SapHanaSource | SapEccSource | SapCloudForCustomerSource | SalesforceSource | SapBwSource | SybaseSource | PostgreSqlSource | MySqlSource | OdbcSource | Db2Source | InformixSource | AzureTableSource;
-
-/**
- * Copy activity sources of tabular type.
+ * A copy activity file system source.
*/
-export interface TabularSource {
+export interface FileSystemSource {
/**
* Polymorphic Discriminator
*/
- type: "TabularSource";
+ type: "FileSystemSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14703,10 +15409,10 @@ export interface TabularSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- queryTimeout?: any;
+ recursive?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
@@ -14715,13 +15421,13 @@ export interface TabularSource {
}
/**
- * A copy activity source for Amazon Redshift Source.
+ * A copy activity Rest service source.
*/
-export interface AmazonRedshiftSource {
+export interface RestSource {
/**
* Polymorphic Discriminator
*/
- type: "AmazonRedshiftSource";
+ type: "RestSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14737,35 +15443,50 @@ export interface AmazonRedshiftSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression
+ * with resultType string).
*/
- queryTimeout?: any;
+ requestMethod?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression
+ * with resultType string).
*/
- additionalColumns?: AdditionalColumns[];
+ requestBody?: any;
/**
- * Database query. Type: string (or Expression with resultType string).
+ * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression
+ * with resultType string).
*/
- query?: any;
+ additionalHeaders?: any;
/**
- * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with
- * unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then
- * copied into the targeted sink from the interim S3.
+ * The pagination rules to compose next page requests. Type: string (or Expression with
+ * resultType string).
*/
- redshiftUnloadSettings?: RedshiftUnloadSettings;
+ paginationRules?: any;
+ /**
+ * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the
+ * timeout to read response data. Default value: 00:01:40. Type: string (or Expression with
+ * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ httpRequestTimeout?: any;
+ /**
+ * The time to await before sending next page request.
+ */
+ requestInterval?: any;
+ /**
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
+ */
+ additionalColumns?: AdditionalColumns[];
}
/**
- * A copy activity Google AdWords service source.
+ * A copy activity Salesforce Service Cloud source.
*/
-export interface GoogleAdWordsSource {
+export interface SalesforceServiceCloudSource {
/**
* Polymorphic Discriminator
*/
- type: "GoogleAdWordsSource";
+ type: "SalesforceServiceCloudSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14781,29 +15502,29 @@ export interface GoogleAdWordsSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Database query. Type: string (or Expression with resultType string).
+ */
+ query?: any;
+ /**
+ * The read behavior for the operation. Default is Query. Possible values include: 'Query',
+ * 'QueryAll'
*/
- queryTimeout?: any;
+ readBehavior?: SalesforceSourceReadBehavior;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
}
/**
- * A copy activity Oracle Service Cloud source.
+ * A copy activity source for OData source.
*/
-export interface OracleServiceCloudSource {
+export interface ODataSource {
/**
* Polymorphic Discriminator
*/
- type: "OracleServiceCloudSource";
+ type: "ODataSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14819,29 +15540,30 @@ export interface OracleServiceCloudSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * OData query. For example, "$top=1". Type: string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ query?: any;
+ /**
+ * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the
+ * timeout to read response data. Default value: 00:05:00. Type: string (or Expression with
+ * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ httpRequestTimeout?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
}
/**
- * A copy activity Dynamics AX source.
+ * A copy activity source for Microsoft Access.
*/
-export interface DynamicsAXSource {
+export interface MicrosoftAccessSource {
/**
* Polymorphic Discriminator
*/
- type: "DynamicsAXSource";
+ type: "MicrosoftAccessSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14857,29 +15579,24 @@ export interface DynamicsAXSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Database query. Type: string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ query?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
}
/**
- * A copy activity Responsys source.
+ * A copy activity source for various relational databases.
*/
-export interface ResponsysSource {
+export interface RelationalSource {
/**
* Polymorphic Discriminator
*/
- type: "ResponsysSource";
+ type: "RelationalSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14895,29 +15612,24 @@ export interface ResponsysSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Database query. Type: string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ query?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
}
/**
- * A copy activity Salesforce Marketing Cloud source.
+ * A copy activity Common Data Service for Apps source.
*/
-export interface SalesforceMarketingCloudSource {
+export interface CommonDataServiceForAppsSource {
/**
* Polymorphic Discriminator
*/
- type: "SalesforceMarketingCloudSource";
+ type: "CommonDataServiceForAppsSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14933,29 +15645,25 @@ export interface SalesforceMarketingCloudSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * FetchXML is a proprietary query language that is used in Microsoft Common Data Service for
+ * Apps (online & on-premises). Type: string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ query?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
}
/**
- * A copy activity Vertica source.
+ * A copy activity Dynamics CRM source.
*/
-export interface VerticaSource {
+export interface DynamicsCrmSource {
/**
* Polymorphic Discriminator
*/
- type: "VerticaSource";
+ type: "DynamicsCrmSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -14971,50 +15679,25 @@ export interface VerticaSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * FetchXML is a proprietary query language that is used in Microsoft Dynamics CRM (online &
+ * on-premises). Type: string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ query?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
-}
-
-/**
- * The settings that will be leveraged for Netezza source partitioning.
- */
-export interface NetezzaPartitionSettings {
- /**
- * The name of the column in integer type that will be used for proceeding range partitioning.
- * Type: string (or Expression with resultType string).
- */
- partitionColumnName?: any;
- /**
- * The maximum value of column specified in partitionColumnName that will be used for proceeding
- * range partitioning. Type: string (or Expression with resultType string).
- */
- partitionUpperBound?: any;
- /**
- * The minimum value of column specified in partitionColumnName that will be used for proceeding
- * range partitioning. Type: string (or Expression with resultType string).
- */
- partitionLowerBound?: any;
}
/**
- * A copy activity Netezza source.
+ * A copy activity Dynamics source.
*/
-export interface NetezzaSource {
+export interface DynamicsSource {
/**
* Polymorphic Discriminator
*/
- type: "NetezzaSource";
+ type: "DynamicsSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15030,38 +15713,25 @@ export interface NetezzaSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * FetchXML is a proprietary query language that is used in Microsoft Dynamics (online &
+ * on-premises). Type: string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ query?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
- /**
- * The partition mechanism that will be used for Netezza read in parallel. Possible values
- * include: 'None', 'DataSlice', 'DynamicRange'
- */
- partitionOption?: NetezzaPartitionOption;
- /**
- * The settings that will be leveraged for Netezza source partitioning.
- */
- partitionSettings?: NetezzaPartitionSettings;
}
/**
- * A copy activity Zoho server source.
+ * A copy activity Azure CosmosDB (SQL API) Collection source.
*/
-export interface ZohoSource {
+export interface CosmosDbSqlApiSource {
/**
* Polymorphic Discriminator
*/
- type: "ZohoSource";
+ type: "CosmosDbSqlApiSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15077,29 +15747,32 @@ export interface ZohoSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * SQL API query. Type: string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ query?: any;
+ /**
+ * Page size of the result. Type: integer (or Expression with resultType integer).
+ */
+ pageSize?: any;
+ /**
+ * Preferred regions. Type: array of strings (or Expression with resultType array of strings).
+ */
+ preferredRegions?: any;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
}
/**
- * A copy activity Xero Service source.
+ * A copy activity Document Database Collection source.
*/
-export interface XeroSource {
+export interface DocumentDbCollectionSource {
/**
* Polymorphic Discriminator
*/
- type: "XeroSource";
+ type: "DocumentDbCollectionSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15114,6 +15787,14 @@ export interface XeroSource {
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
+ /**
+ * Documents query. Type: string (or Expression with resultType string).
+ */
+ query?: any;
+ /**
+ * Nested properties separator. Type: string (or Expression with resultType string).
+ */
+ nestingSeparator?: any;
/**
* Query timeout. Type: string (or Expression with resultType string), pattern:
* ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
@@ -15124,20 +15805,16 @@ export interface XeroSource {
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
}
/**
- * A copy activity Square Service source.
+ * A copy activity Azure Blob source.
*/
-export interface SquareSource {
+export interface BlobSource {
/**
* Polymorphic Discriminator
*/
- type: "SquareSource";
+ type: "BlobSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15153,29 +15830,53 @@ export interface SquareSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Treat empty as null. Type: boolean (or Expression with resultType boolean).
*/
- queryTimeout?: any;
+ treatEmptyAsNull?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * Number of header lines to skip from each blob. Type: integer (or Expression with resultType
+ * integer).
*/
- additionalColumns?: AdditionalColumns[];
+ skipHeaderLineCount?: any;
/**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
+ * If true, files under the folder path will be read recursively. Default is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- query?: any;
+ recursive?: any;
}
/**
- * A copy activity Spark Server source.
+ * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with
+ * unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then
+ * copied into the targeted sink from the interim S3.
*/
-export interface SparkSource {
+export interface RedshiftUnloadSettings {
+ /**
+ * The name of the Amazon S3 linked service which will be used for the unload operation when
+ * copying from the Amazon Redshift source.
+ */
+ s3LinkedServiceName: LinkedServiceReference;
+ /**
+ * The bucket of the interim Amazon S3 which will be used to store the unloaded data from Amazon
+ * Redshift source. The bucket must be in the same region as the Amazon Redshift source. Type:
+ * string (or Expression with resultType string).
+ */
+ bucketName: any;
+}
+
+/**
+ * Contains the possible cases for TabularSource.
+ */
+export type TabularSourceUnion = TabularSource | AmazonRedshiftSource | GoogleAdWordsSource | OracleServiceCloudSource | DynamicsAXSource | ResponsysSource | SalesforceMarketingCloudSource | VerticaSource | NetezzaSource | ZohoSource | XeroSource | SquareSource | SparkSource | ShopifySource | ServiceNowSource | QuickBooksSource | PrestoSource | PhoenixSource | PaypalSource | MarketoSource | AzureMariaDBSource | MariaDBSource | MagentoSource | JiraSource | ImpalaSource | HubspotSource | HiveSource | HBaseSource | GreenplumSource | GoogleBigQuerySource | EloquaSource | DrillSource | CouchbaseSource | ConcurSource | AzurePostgreSqlSource | AmazonMWSSource | CassandraSource | TeradataSource | AzureMySqlSource | SqlDWSource | SqlMISource | AzureSqlSource | SqlServerSource | SqlSource | SapTableSource | SapOpenHubSource | SapHanaSource | SapEccSource | SapCloudForCustomerSource | SalesforceSource | SapBwSource | SybaseSource | PostgreSqlSource | MySqlSource | OdbcSource | Db2Source | InformixSource | AzureTableSource;
+
+/**
+ * Copy activity sources of tabular type.
+ */
+export interface TabularSource {
/**
* Polymorphic Discriminator
*/
- type: "SparkSource";
+ type: "TabularSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15200,20 +15901,16 @@ export interface SparkSource {
* Expression with resultType array of objects).
*/
additionalColumns?: AdditionalColumns[];
- /**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
- */
- query?: any;
}
/**
- * A copy activity Shopify Service source.
+ * A copy activity source for Amazon Redshift Source.
*/
-export interface ShopifySource {
+export interface AmazonRedshiftSource {
/**
* Polymorphic Discriminator
*/
- type: "ShopifySource";
+ type: "AmazonRedshiftSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15239,19 +15936,25 @@ export interface ShopifySource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * A query to retrieve data from source. Type: string (or Expression with resultType string).
+ * Database query. Type: string (or Expression with resultType string).
*/
query?: any;
+ /**
+ * The Amazon S3 settings needed for the interim Amazon S3 when copying from Amazon Redshift with
+ * unload. With this, data from Amazon Redshift source will be unloaded into S3 first and then
+ * copied into the targeted sink from the interim S3.
+ */
+ redshiftUnloadSettings?: RedshiftUnloadSettings;
}
/**
- * A copy activity ServiceNow server source.
+ * A copy activity Google AdWords service source.
*/
-export interface ServiceNowSource {
+export interface GoogleAdWordsSource {
/**
* Polymorphic Discriminator
*/
- type: "ServiceNowSource";
+ type: "GoogleAdWordsSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15283,13 +15986,13 @@ export interface ServiceNowSource {
}
/**
- * A copy activity QuickBooks server source.
+ * A copy activity Oracle Service Cloud source.
*/
-export interface QuickBooksSource {
+export interface OracleServiceCloudSource {
/**
* Polymorphic Discriminator
*/
- type: "QuickBooksSource";
+ type: "OracleServiceCloudSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15321,13 +16024,13 @@ export interface QuickBooksSource {
}
/**
- * A copy activity Presto server source.
+ * A copy activity Dynamics AX source.
*/
-export interface PrestoSource {
+export interface DynamicsAXSource {
/**
* Polymorphic Discriminator
*/
- type: "PrestoSource";
+ type: "DynamicsAXSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15356,16 +16059,22 @@ export interface PrestoSource {
* A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
query?: any;
+ /**
+ * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the
+ * timeout to read response data. Default value: 00:05:00. Type: string (or Expression with
+ * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ httpRequestTimeout?: any;
}
/**
- * A copy activity Phoenix server source.
+ * A copy activity Responsys source.
*/
-export interface PhoenixSource {
+export interface ResponsysSource {
/**
* Polymorphic Discriminator
*/
- type: "PhoenixSource";
+ type: "ResponsysSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15397,13 +16106,13 @@ export interface PhoenixSource {
}
/**
- * A copy activity Paypal Service source.
+ * A copy activity Salesforce Marketing Cloud source.
*/
-export interface PaypalSource {
+export interface SalesforceMarketingCloudSource {
/**
* Polymorphic Discriminator
*/
- type: "PaypalSource";
+ type: "SalesforceMarketingCloudSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15435,13 +16144,13 @@ export interface PaypalSource {
}
/**
- * A copy activity Marketo server source.
+ * A copy activity Vertica source.
*/
-export interface MarketoSource {
+export interface VerticaSource {
/**
* Polymorphic Discriminator
*/
- type: "MarketoSource";
+ type: "VerticaSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15473,13 +16182,34 @@ export interface MarketoSource {
}
/**
- * A copy activity Azure MariaDB source.
+ * The settings that will be leveraged for Netezza source partitioning.
*/
-export interface AzureMariaDBSource {
+export interface NetezzaPartitionSettings {
+ /**
+ * The name of the column in integer type that will be used for proceeding range partitioning.
+ * Type: string (or Expression with resultType string).
+ */
+ partitionColumnName?: any;
+ /**
+ * The maximum value of column specified in partitionColumnName that will be used for proceeding
+ * range partitioning. Type: string (or Expression with resultType string).
+ */
+ partitionUpperBound?: any;
+ /**
+ * The minimum value of column specified in partitionColumnName that will be used for proceeding
+ * range partitioning. Type: string (or Expression with resultType string).
+ */
+ partitionLowerBound?: any;
+}
+
+/**
+ * A copy activity Netezza source.
+ */
+export interface NetezzaSource {
/**
* Polymorphic Discriminator
*/
- type: "AzureMariaDBSource";
+ type: "NetezzaSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15508,16 +16238,25 @@ export interface AzureMariaDBSource {
* A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
query?: any;
+ /**
+ * The partition mechanism that will be used for Netezza read in parallel. Possible values
+ * include: 'None', 'DataSlice', 'DynamicRange'
+ */
+ partitionOption?: NetezzaPartitionOption;
+ /**
+ * The settings that will be leveraged for Netezza source partitioning.
+ */
+ partitionSettings?: NetezzaPartitionSettings;
}
/**
- * A copy activity MariaDB server source.
+ * A copy activity Zoho server source.
*/
-export interface MariaDBSource {
+export interface ZohoSource {
/**
* Polymorphic Discriminator
*/
- type: "MariaDBSource";
+ type: "ZohoSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15549,13 +16288,13 @@ export interface MariaDBSource {
}
/**
- * A copy activity Magento server source.
+ * A copy activity Xero Service source.
*/
-export interface MagentoSource {
+export interface XeroSource {
/**
* Polymorphic Discriminator
*/
- type: "MagentoSource";
+ type: "XeroSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15587,13 +16326,13 @@ export interface MagentoSource {
}
/**
- * A copy activity Jira Service source.
+ * A copy activity Square Service source.
*/
-export interface JiraSource {
+export interface SquareSource {
/**
* Polymorphic Discriminator
*/
- type: "JiraSource";
+ type: "SquareSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15625,13 +16364,13 @@ export interface JiraSource {
}
/**
- * A copy activity Impala server source.
+ * A copy activity Spark Server source.
*/
-export interface ImpalaSource {
+export interface SparkSource {
/**
* Polymorphic Discriminator
*/
- type: "ImpalaSource";
+ type: "SparkSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15663,13 +16402,13 @@ export interface ImpalaSource {
}
/**
- * A copy activity Hubspot Service source.
+ * A copy activity Shopify Service source.
*/
-export interface HubspotSource {
+export interface ShopifySource {
/**
* Polymorphic Discriminator
*/
- type: "HubspotSource";
+ type: "ShopifySource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15701,13 +16440,13 @@ export interface HubspotSource {
}
/**
- * A copy activity Hive Server source.
+ * A copy activity ServiceNow server source.
*/
-export interface HiveSource {
+export interface ServiceNowSource {
/**
* Polymorphic Discriminator
*/
- type: "HiveSource";
+ type: "ServiceNowSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15739,13 +16478,13 @@ export interface HiveSource {
}
/**
- * A copy activity HBase server source.
+ * A copy activity QuickBooks server source.
*/
-export interface HBaseSource {
+export interface QuickBooksSource {
/**
* Polymorphic Discriminator
*/
- type: "HBaseSource";
+ type: "QuickBooksSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15777,13 +16516,13 @@ export interface HBaseSource {
}
/**
- * A copy activity Greenplum Database source.
+ * A copy activity Presto server source.
*/
-export interface GreenplumSource {
+export interface PrestoSource {
/**
* Polymorphic Discriminator
*/
- type: "GreenplumSource";
+ type: "PrestoSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15815,13 +16554,13 @@ export interface GreenplumSource {
}
/**
- * A copy activity Google BigQuery service source.
+ * A copy activity Phoenix server source.
*/
-export interface GoogleBigQuerySource {
+export interface PhoenixSource {
/**
* Polymorphic Discriminator
*/
- type: "GoogleBigQuerySource";
+ type: "PhoenixSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15853,13 +16592,13 @@ export interface GoogleBigQuerySource {
}
/**
- * A copy activity Eloqua server source.
+ * A copy activity Paypal Service source.
*/
-export interface EloquaSource {
+export interface PaypalSource {
/**
* Polymorphic Discriminator
*/
- type: "EloquaSource";
+ type: "PaypalSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15891,13 +16630,13 @@ export interface EloquaSource {
}
/**
- * A copy activity Drill server source.
+ * A copy activity Marketo server source.
*/
-export interface DrillSource {
+export interface MarketoSource {
/**
* Polymorphic Discriminator
*/
- type: "DrillSource";
+ type: "MarketoSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15929,13 +16668,13 @@ export interface DrillSource {
}
/**
- * A copy activity Couchbase server source.
+ * A copy activity Azure MariaDB source.
*/
-export interface CouchbaseSource {
+export interface AzureMariaDBSource {
/**
* Polymorphic Discriminator
*/
- type: "CouchbaseSource";
+ type: "AzureMariaDBSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -15967,13 +16706,13 @@ export interface CouchbaseSource {
}
/**
- * A copy activity Concur Service source.
+ * A copy activity MariaDB server source.
*/
-export interface ConcurSource {
+export interface MariaDBSource {
/**
* Polymorphic Discriminator
*/
- type: "ConcurSource";
+ type: "MariaDBSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16005,13 +16744,13 @@ export interface ConcurSource {
}
/**
- * A copy activity Azure PostgreSQL source.
+ * A copy activity Magento server source.
*/
-export interface AzurePostgreSqlSource {
+export interface MagentoSource {
/**
* Polymorphic Discriminator
*/
- type: "AzurePostgreSqlSource";
+ type: "MagentoSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16043,13 +16782,13 @@ export interface AzurePostgreSqlSource {
}
/**
- * A copy activity Amazon Marketplace Web Service source.
+ * A copy activity Jira Service source.
*/
-export interface AmazonMWSSource {
+export interface JiraSource {
/**
* Polymorphic Discriminator
*/
- type: "AmazonMWSSource";
+ type: "JiraSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16081,13 +16820,13 @@ export interface AmazonMWSSource {
}
/**
- * A copy activity source for a Cassandra database.
+ * A copy activity Impala server source.
*/
-export interface CassandraSource {
+export interface ImpalaSource {
/**
* Polymorphic Discriminator
*/
- type: "CassandraSource";
+ type: "ImpalaSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16113,50 +16852,19 @@ export interface CassandraSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command.
- * Type: string (or Expression with resultType string).
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
query?: any;
- /**
- * The consistency level specifies how many Cassandra servers must respond to a read request
- * before returning data to the client application. Cassandra checks the specified number of
- * Cassandra servers for data to satisfy the read request. Must be one of
- * cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive.
- * Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', 'LOCAL_QUORUM', 'ONE', 'TWO',
- * 'THREE', 'LOCAL_ONE', 'SERIAL', 'LOCAL_SERIAL'
- */
- consistencyLevel?: CassandraSourceReadConsistencyLevels;
-}
-
-/**
- * The settings that will be leveraged for teradata source partitioning.
- */
-export interface TeradataPartitionSettings {
- /**
- * The name of the column that will be used for proceeding range or hash partitioning. Type:
- * string (or Expression with resultType string).
- */
- partitionColumnName?: any;
- /**
- * The maximum value of column specified in partitionColumnName that will be used for proceeding
- * range partitioning. Type: string (or Expression with resultType string).
- */
- partitionUpperBound?: any;
- /**
- * The minimum value of column specified in partitionColumnName that will be used for proceeding
- * range partitioning. Type: string (or Expression with resultType string).
- */
- partitionLowerBound?: any;
}
/**
- * A copy activity Teradata source.
+ * A copy activity Hubspot Service source.
*/
-export interface TeradataSource {
+export interface HubspotSource {
/**
* Polymorphic Discriminator
*/
- type: "TeradataSource";
+ type: "HubspotSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16182,28 +16890,19 @@ export interface TeradataSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Teradata query. Type: string (or Expression with resultType string).
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
query?: any;
- /**
- * The partition mechanism that will be used for teradata read in parallel. Possible values
- * include: 'None', 'Hash', 'DynamicRange'
- */
- partitionOption?: TeradataPartitionOption;
- /**
- * The settings that will be leveraged for teradata source partitioning.
- */
- partitionSettings?: TeradataPartitionSettings;
}
/**
- * A copy activity Azure MySQL source.
+ * A copy activity Hive Server source.
*/
-export interface AzureMySqlSource {
+export interface HiveSource {
/**
* Polymorphic Discriminator
*/
- type: "AzureMySqlSource";
+ type: "HiveSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16229,19 +16928,19 @@ export interface AzureMySqlSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Database query. Type: string (or Expression with resultType string).
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
query?: any;
}
/**
- * A copy activity SQL Data Warehouse source.
+ * A copy activity HBase server source.
*/
-export interface SqlDWSource {
+export interface HBaseSource {
/**
* Polymorphic Discriminator
*/
- type: "SqlDWSource";
+ type: "HBaseSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16257,55 +16956,29 @@ export interface SqlDWSource {
*/
maxConcurrentConnections?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
- */
- queryTimeout?: any;
- /**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
- */
- additionalColumns?: AdditionalColumns[];
- /**
- * SQL Data Warehouse reader query. Type: string (or Expression with resultType string).
- */
- sqlReaderQuery?: any;
- /**
- * Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same
- * time as SqlReaderQuery. Type: string (or Expression with resultType string).
- */
- sqlReaderStoredProcedureName?: any;
- /**
- * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
- * type: "int"}}". Type: object (or Expression with resultType object), itemType:
- * StoredProcedureParameter.
- */
- storedProcedureParameters?: any;
-}
-
-/**
- * SQL stored procedure parameter.
- */
-export interface StoredProcedureParameter {
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ queryTimeout?: any;
/**
- * Stored procedure parameter value. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- value?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * Stored procedure parameter type. Possible values include: 'String', 'Int', 'Int64', 'Decimal',
- * 'Guid', 'Boolean', 'Date'
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- type?: StoredProcedureParameterType;
+ query?: any;
}
/**
- * A copy activity Azure SQL Managed Instance source.
+ * A copy activity Greenplum Database source.
*/
-export interface SqlMISource {
+export interface GreenplumSource {
/**
* Polymorphic Discriminator
*/
- type: "SqlMISource";
+ type: "GreenplumSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16331,33 +17004,19 @@ export interface SqlMISource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * SQL reader query. Type: string (or Expression with resultType string).
- */
- sqlReaderQuery?: any;
- /**
- * Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at
- * the same time as SqlReaderQuery. Type: string (or Expression with resultType string).
- */
- sqlReaderStoredProcedureName?: any;
- /**
- * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
- * type: "int"}}".
- */
- storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter };
- /**
- * Which additional types to produce.
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- produceAdditionalTypes?: any;
+ query?: any;
}
/**
- * A copy activity Azure SQL source.
+ * A copy activity Google BigQuery service source.
*/
-export interface AzureSqlSource {
+export interface GoogleBigQuerySource {
/**
* Polymorphic Discriminator
*/
- type: "AzureSqlSource";
+ type: "GoogleBigQuerySource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16383,33 +17042,19 @@ export interface AzureSqlSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * SQL reader query. Type: string (or Expression with resultType string).
- */
- sqlReaderQuery?: any;
- /**
- * Name of the stored procedure for a SQL Database source. This cannot be used at the same time
- * as SqlReaderQuery. Type: string (or Expression with resultType string).
- */
- sqlReaderStoredProcedureName?: any;
- /**
- * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
- * type: "int"}}".
- */
- storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter };
- /**
- * Which additional types to produce.
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- produceAdditionalTypes?: any;
+ query?: any;
}
/**
- * A copy activity SQL server source.
+ * A copy activity Eloqua server source.
*/
-export interface SqlServerSource {
+export interface EloquaSource {
/**
* Polymorphic Discriminator
*/
- type: "SqlServerSource";
+ type: "EloquaSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16435,33 +17080,19 @@ export interface SqlServerSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * SQL reader query. Type: string (or Expression with resultType string).
- */
- sqlReaderQuery?: any;
- /**
- * Name of the stored procedure for a SQL Database source. This cannot be used at the same time
- * as SqlReaderQuery. Type: string (or Expression with resultType string).
- */
- sqlReaderStoredProcedureName?: any;
- /**
- * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
- * type: "int"}}".
- */
- storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter };
- /**
- * Which additional types to produce.
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- produceAdditionalTypes?: any;
+ query?: any;
}
/**
- * A copy activity SQL source.
+ * A copy activity Drill server source.
*/
-export interface SqlSource {
+export interface DrillSource {
/**
* Polymorphic Discriminator
*/
- type: "SqlSource";
+ type: "DrillSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16487,61 +17118,57 @@ export interface SqlSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * SQL reader query. Type: string (or Expression with resultType string).
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- sqlReaderQuery?: any;
+ query?: any;
+}
+
+/**
+ * A copy activity Couchbase server source.
+ */
+export interface CouchbaseSource {
/**
- * Name of the stored procedure for a SQL Database source. This cannot be used at the same time
- * as SqlReaderQuery. Type: string (or Expression with resultType string).
+ * Polymorphic Discriminator
*/
- sqlReaderStoredProcedureName?: any;
+ type: "CouchbaseSource";
/**
- * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
- * type: "int"}}".
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter };
+ sourceRetryCount?: any;
/**
- * Specifies the transaction locking behavior for the SQL source. Allowed values:
- * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is
- * ReadCommitted. Type: string (or Expression with resultType string).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- isolationLevel?: any;
-}
-
-/**
- * The settings that will be leveraged for SAP table source partitioning.
- */
-export interface SapTablePartitionSettings {
+ sourceRetryWait?: any;
/**
- * The name of the column that will be used for proceeding range partitioning. Type: string (or
- * Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- partitionColumnName?: any;
+ maxConcurrentConnections?: any;
/**
- * The maximum value of column specified in partitionColumnName that will be used for proceeding
- * range partitioning. Type: string (or Expression with resultType string).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- partitionUpperBound?: any;
+ queryTimeout?: any;
/**
- * The minimum value of column specified in partitionColumnName that will be used for proceeding
- * range partitioning. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- partitionLowerBound?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * The maximum value of partitions the table will be split into. Type: integer (or Expression
- * with resultType string).
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- maxPartitionsNumber?: any;
+ query?: any;
}
/**
- * A copy activity source for SAP Table source.
+ * A copy activity Concur Service source.
*/
-export interface SapTableSource {
+export interface ConcurSource {
/**
* Polymorphic Discriminator
*/
- type: "SapTableSource";
+ type: "ConcurSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16567,54 +17194,57 @@ export interface SapTableSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * The number of rows to be retrieved. Type: integer(or Expression with resultType integer).
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- rowCount?: any;
+ query?: any;
+}
+
+/**
+ * A copy activity Azure PostgreSQL source.
+ */
+export interface AzurePostgreSqlSource {
/**
- * The number of rows that will be skipped. Type: integer (or Expression with resultType
- * integer).
+ * Polymorphic Discriminator
*/
- rowSkips?: any;
+ type: "AzurePostgreSqlSource";
/**
- * The fields of the SAP table that will be retrieved. For example, column0, column1. Type:
- * string (or Expression with resultType string).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- rfcTableFields?: any;
+ sourceRetryCount?: any;
/**
- * The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type:
- * string (or Expression with resultType string).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- rfcTableOptions?: any;
+ sourceRetryWait?: any;
/**
- * Specifies the maximum number of rows that will be retrieved at a time when retrieving data
- * from SAP Table. Type: integer (or Expression with resultType integer).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- batchSize?: any;
+ maxConcurrentConnections?: any;
/**
- * Specifies the custom RFC function module that will be used to read data from SAP Table. Type:
- * string (or Expression with resultType string).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- customRfcReadTableFunctionModule?: any;
+ queryTimeout?: any;
/**
- * The partition mechanism that will be used for SAP table read in parallel. Possible values
- * include: 'None', 'PartitionOnInt', 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth',
- * 'PartitionOnCalendarDate', 'PartitionOnTime'
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- partitionOption?: SapTablePartitionOption;
+ additionalColumns?: AdditionalColumns[];
/**
- * The settings that will be leveraged for SAP table source partitioning.
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- partitionSettings?: SapTablePartitionSettings;
+ query?: any;
}
/**
- * A copy activity source for SAP Business Warehouse Open Hub Destination source.
+ * A copy activity Amazon Marketplace Web Service source.
*/
-export interface SapOpenHubSource {
+export interface AmazonMWSSource {
/**
* Polymorphic Discriminator
*/
- type: "SapOpenHubSource";
+ type: "AmazonMWSSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16640,37 +17270,19 @@ export interface SapOpenHubSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Whether to exclude the records of the last request. The default value is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- excludeLastRequest?: any;
- /**
- * The ID of request for delta loading. Once it is set, only data with requestId larger than the
- * value of this property will be retrieved. The default value is 0. Type: integer (or Expression
- * with resultType integer ).
- */
- baseRequestId?: any;
-}
-
-/**
- * The settings that will be leveraged for SAP HANA source partitioning.
- */
-export interface SapHanaPartitionSettings {
- /**
- * The name of the column that will be used for proceeding range partitioning. Type: string (or
- * Expression with resultType string).
+ * A query to retrieve data from source. Type: string (or Expression with resultType string).
*/
- partitionColumnName?: any;
+ query?: any;
}
/**
- * A copy activity source for SAP HANA source.
+ * A copy activity source for a Cassandra database.
*/
-export interface SapHanaSource {
+export interface CassandraSource {
/**
* Polymorphic Discriminator
*/
- type: "SapHanaSource";
+ type: "CassandraSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16696,33 +17308,50 @@ export interface SapHanaSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * SAP HANA Sql query. Type: string (or Expression with resultType string).
+ * Database query. Should be a SQL-92 query expression or Cassandra Query Language (CQL) command.
+ * Type: string (or Expression with resultType string).
*/
query?: any;
/**
- * The packet size of data read from SAP HANA. Type: integer(or Expression with resultType
- * integer).
+ * The consistency level specifies how many Cassandra servers must respond to a read request
+ * before returning data to the client application. Cassandra checks the specified number of
+ * Cassandra servers for data to satisfy the read request. Must be one of
+ * cassandraSourceReadConsistencyLevels. The default value is 'ONE'. It is case-insensitive.
+ * Possible values include: 'ALL', 'EACH_QUORUM', 'QUORUM', 'LOCAL_QUORUM', 'ONE', 'TWO',
+ * 'THREE', 'LOCAL_ONE', 'SERIAL', 'LOCAL_SERIAL'
+ */
+ consistencyLevel?: CassandraSourceReadConsistencyLevels;
+}
+
+/**
+ * The settings that will be leveraged for teradata source partitioning.
+ */
+export interface TeradataPartitionSettings {
+ /**
+ * The name of the column that will be used for proceeding range or hash partitioning. Type:
+ * string (or Expression with resultType string).
*/
- packetSize?: any;
+ partitionColumnName?: any;
/**
- * The partition mechanism that will be used for SAP HANA read in parallel. Possible values
- * include: 'None', 'PhysicalPartitionsOfTable', 'SapHanaDynamicRange'
+ * The maximum value of column specified in partitionColumnName that will be used for proceeding
+ * range partitioning. Type: string (or Expression with resultType string).
*/
- partitionOption?: SapHanaPartitionOption;
+ partitionUpperBound?: any;
/**
- * The settings that will be leveraged for SAP HANA source partitioning.
+ * The minimum value of column specified in partitionColumnName that will be used for proceeding
+ * range partitioning. Type: string (or Expression with resultType string).
*/
- partitionSettings?: SapHanaPartitionSettings;
+ partitionLowerBound?: any;
}
/**
- * A copy activity source for SAP ECC source.
+ * A copy activity Teradata source.
*/
-export interface SapEccSource {
+export interface TeradataSource {
/**
* Polymorphic Discriminator
*/
- type: "SapEccSource";
+ type: "TeradataSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16748,20 +17377,28 @@ export interface SapEccSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType
- * string).
+ * Teradata query. Type: string (or Expression with resultType string).
*/
query?: any;
+ /**
+ * The partition mechanism that will be used for teradata read in parallel. Possible values
+ * include: 'None', 'Hash', 'DynamicRange'
+ */
+ partitionOption?: TeradataPartitionOption;
+ /**
+ * The settings that will be leveraged for teradata source partitioning.
+ */
+ partitionSettings?: TeradataPartitionSettings;
}
/**
- * A copy activity source for SAP Cloud for Customer source.
+ * A copy activity Azure MySQL source.
*/
-export interface SapCloudForCustomerSource {
+export interface AzureMySqlSource {
/**
* Polymorphic Discriminator
*/
- type: "SapCloudForCustomerSource";
+ type: "AzureMySqlSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16787,20 +17424,19 @@ export interface SapCloudForCustomerSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with
- * resultType string).
+ * Database query. Type: string (or Expression with resultType string).
*/
query?: any;
}
/**
- * A copy activity Salesforce source.
+ * A copy activity SQL Data Warehouse source.
*/
-export interface SalesforceSource {
+export interface SqlDWSource {
/**
* Polymorphic Discriminator
*/
- type: "SalesforceSource";
+ type: "SqlDWSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16826,24 +17462,45 @@ export interface SalesforceSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Database query. Type: string (or Expression with resultType string).
+ * SQL Data Warehouse reader query. Type: string (or Expression with resultType string).
*/
- query?: any;
+ sqlReaderQuery?: any;
/**
- * The read behavior for the operation. Default is Query. Possible values include: 'Query',
- * 'QueryAll'
+ * Name of the stored procedure for a SQL Data Warehouse source. This cannot be used at the same
+ * time as SqlReaderQuery. Type: string (or Expression with resultType string).
*/
- readBehavior?: SalesforceSourceReadBehavior;
+ sqlReaderStoredProcedureName?: any;
+ /**
+ * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
+ * type: "int"}}". Type: object (or Expression with resultType object), itemType:
+ * StoredProcedureParameter.
+ */
+ storedProcedureParameters?: any;
}
/**
- * A copy activity source for SapBW server via MDX.
+ * SQL stored procedure parameter.
*/
-export interface SapBwSource {
+export interface StoredProcedureParameter {
+ /**
+ * Stored procedure parameter value. Type: string (or Expression with resultType string).
+ */
+ value?: any;
+ /**
+ * Stored procedure parameter type. Possible values include: 'String', 'Int', 'Int64', 'Decimal',
+ * 'Guid', 'Boolean', 'Date'
+ */
+ type?: StoredProcedureParameterType;
+}
+
+/**
+ * A copy activity Azure SQL Managed Instance source.
+ */
+export interface SqlMISource {
/**
* Polymorphic Discriminator
*/
- type: "SapBwSource";
+ type: "SqlMISource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16869,19 +17526,33 @@ export interface SapBwSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * MDX query. Type: string (or Expression with resultType string).
+ * SQL reader query. Type: string (or Expression with resultType string).
*/
- query?: any;
+ sqlReaderQuery?: any;
+ /**
+ * Name of the stored procedure for a Azure SQL Managed Instance source. This cannot be used at
+ * the same time as SqlReaderQuery. Type: string (or Expression with resultType string).
+ */
+ sqlReaderStoredProcedureName?: any;
+ /**
+ * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
+ * type: "int"}}".
+ */
+ storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter };
+ /**
+ * Which additional types to produce.
+ */
+ produceAdditionalTypes?: any;
}
/**
- * A copy activity source for Sybase databases.
+ * A copy activity Azure SQL source.
*/
-export interface SybaseSource {
+export interface AzureSqlSource {
/**
* Polymorphic Discriminator
*/
- type: "SybaseSource";
+ type: "AzureSqlSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16907,19 +17578,33 @@ export interface SybaseSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Database query. Type: string (or Expression with resultType string).
+ * SQL reader query. Type: string (or Expression with resultType string).
*/
- query?: any;
+ sqlReaderQuery?: any;
+ /**
+ * Name of the stored procedure for a SQL Database source. This cannot be used at the same time
+ * as SqlReaderQuery. Type: string (or Expression with resultType string).
+ */
+ sqlReaderStoredProcedureName?: any;
+ /**
+ * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
+ * type: "int"}}".
+ */
+ storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter };
+ /**
+ * Which additional types to produce.
+ */
+ produceAdditionalTypes?: any;
}
/**
- * A copy activity source for PostgreSQL databases.
+ * A copy activity SQL server source.
*/
-export interface PostgreSqlSource {
+export interface SqlServerSource {
/**
* Polymorphic Discriminator
*/
- type: "PostgreSqlSource";
+ type: "SqlServerSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16945,19 +17630,33 @@ export interface PostgreSqlSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Database query. Type: string (or Expression with resultType string).
+ * SQL reader query. Type: string (or Expression with resultType string).
*/
- query?: any;
+ sqlReaderQuery?: any;
+ /**
+ * Name of the stored procedure for a SQL Database source. This cannot be used at the same time
+ * as SqlReaderQuery. Type: string (or Expression with resultType string).
+ */
+ sqlReaderStoredProcedureName?: any;
+ /**
+ * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
+ * type: "int"}}".
+ */
+ storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter };
+ /**
+ * Which additional types to produce.
+ */
+ produceAdditionalTypes?: any;
}
/**
- * A copy activity source for MySQL databases.
+ * A copy activity SQL source.
*/
-export interface MySqlSource {
+export interface SqlSource {
/**
* Polymorphic Discriminator
*/
- type: "MySqlSource";
+ type: "SqlSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -16983,57 +17682,61 @@ export interface MySqlSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Database query. Type: string (or Expression with resultType string).
+ * SQL reader query. Type: string (or Expression with resultType string).
*/
- query?: any;
-}
-
-/**
- * A copy activity source for ODBC databases.
- */
-export interface OdbcSource {
+ sqlReaderQuery?: any;
/**
- * Polymorphic Discriminator
+ * Name of the stored procedure for a SQL Database source. This cannot be used at the same time
+ * as SqlReaderQuery. Type: string (or Expression with resultType string).
*/
- type: "OdbcSource";
+ sqlReaderStoredProcedureName?: any;
/**
- * Source retry count. Type: integer (or Expression with resultType integer).
+ * Value and type setting for stored procedure parameters. Example: "{Parameter1: {value: "1",
+ * type: "int"}}".
*/
- sourceRetryCount?: any;
+ storedProcedureParameters?: { [propertyName: string]: StoredProcedureParameter };
/**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Specifies the transaction locking behavior for the SQL source. Allowed values:
+ * ReadCommitted/ReadUncommitted/RepeatableRead/Serializable/Snapshot. The default value is
+ * ReadCommitted. Type: string (or Expression with resultType string).
*/
- sourceRetryWait?: any;
+ isolationLevel?: any;
+}
+
+/**
+ * The settings that will be leveraged for SAP table source partitioning.
+ */
+export interface SapTablePartitionSettings {
/**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
+ * The name of the column that will be used for proceeding range partitioning. Type: string (or
+ * Expression with resultType string).
*/
- maxConcurrentConnections?: any;
+ partitionColumnName?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * The maximum value of column specified in partitionColumnName that will be used for proceeding
+ * range partitioning. Type: string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ partitionUpperBound?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * The minimum value of column specified in partitionColumnName that will be used for proceeding
+ * range partitioning. Type: string (or Expression with resultType string).
*/
- additionalColumns?: AdditionalColumns[];
+ partitionLowerBound?: any;
/**
- * Database query. Type: string (or Expression with resultType string).
+ * The maximum value of partitions the table will be split into. Type: integer (or Expression
+ * with resultType string).
*/
- query?: any;
+ maxPartitionsNumber?: any;
}
/**
- * A copy activity source for Db2 databases.
+ * A copy activity source for SAP Table source.
*/
-export interface Db2Source {
+export interface SapTableSource {
/**
* Polymorphic Discriminator
*/
- type: "Db2Source";
+ type: "SapTableSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -17059,57 +17762,54 @@ export interface Db2Source {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Database query. Type: string (or Expression with resultType string).
+ * The number of rows to be retrieved. Type: integer(or Expression with resultType integer).
*/
- query?: any;
-}
-
-/**
- * A copy activity source for Informix.
- */
-export interface InformixSource {
+ rowCount?: any;
/**
- * Polymorphic Discriminator
+ * The number of rows that will be skipped. Type: integer (or Expression with resultType
+ * integer).
*/
- type: "InformixSource";
+ rowSkips?: any;
/**
- * Source retry count. Type: integer (or Expression with resultType integer).
+ * The fields of the SAP table that will be retrieved. For example, column0, column1. Type:
+ * string (or Expression with resultType string).
*/
- sourceRetryCount?: any;
+ rfcTableFields?: any;
/**
- * Source retry wait. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * The options for the filtering of the SAP Table. For example, COLUMN0 EQ SOME VALUE. Type:
+ * string (or Expression with resultType string).
*/
- sourceRetryWait?: any;
+ rfcTableOptions?: any;
/**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
+ * Specifies the maximum number of rows that will be retrieved at a time when retrieving data
+ * from SAP Table. Type: integer (or Expression with resultType integer).
*/
- maxConcurrentConnections?: any;
+ batchSize?: any;
/**
- * Query timeout. Type: string (or Expression with resultType string), pattern:
- * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ * Specifies the custom RFC function module that will be used to read data from SAP Table. Type:
+ * string (or Expression with resultType string).
*/
- queryTimeout?: any;
+ customRfcReadTableFunctionModule?: any;
/**
- * Specifies the additional columns to be added to source data. Type: array of objects (or
- * Expression with resultType array of objects).
+ * The partition mechanism that will be used for SAP table read in parallel. Possible values
+ * include: 'None', 'PartitionOnInt', 'PartitionOnCalendarYear', 'PartitionOnCalendarMonth',
+ * 'PartitionOnCalendarDate', 'PartitionOnTime'
*/
- additionalColumns?: AdditionalColumns[];
+ partitionOption?: SapTablePartitionOption;
/**
- * Database query. Type: string (or Expression with resultType string).
+ * The settings that will be leveraged for SAP table source partitioning.
*/
- query?: any;
+ partitionSettings?: SapTablePartitionSettings;
}
/**
- * A copy activity Azure Table source.
+ * A copy activity source for SAP Business Warehouse Open Hub Destination source.
*/
-export interface AzureTableSource {
+export interface SapOpenHubSource {
/**
* Polymorphic Discriminator
*/
- type: "AzureTableSource";
+ type: "SapOpenHubSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -17135,528 +17835,521 @@ export interface AzureTableSource {
*/
additionalColumns?: AdditionalColumns[];
/**
- * Azure Table source query. Type: string (or Expression with resultType string).
+ * Whether to exclude the records of the last request. The default value is true. Type: boolean
+ * (or Expression with resultType boolean).
*/
- azureTableSourceQuery?: any;
+ excludeLastRequest?: any;
/**
- * Azure Table source ignore table not found. Type: boolean (or Expression with resultType
- * boolean).
+ * The ID of request for delta loading. Once it is set, only data with requestId larger than the
+ * value of this property will be retrieved. The default value is 0. Type: integer (or Expression
+ * with resultType integer).
*/
- azureTableSourceIgnoreTableNotFound?: any;
+ baseRequestId?: any;
}
/**
- * Contains the possible cases for StoreReadSettings.
- */
-export type StoreReadSettingsUnion = StoreReadSettings | HdfsReadSettings | HttpReadSettings | SftpReadSettings | FtpReadSettings | GoogleCloudStorageReadSettings | AzureFileStorageReadSettings | FileServerReadSettings | AmazonS3ReadSettings | AzureDataLakeStoreReadSettings | AzureBlobFSReadSettings | AzureBlobStorageReadSettings;
-
-/**
- * Connector read setting.
+ * The settings that will be leveraged for SAP HANA source partitioning.
*/
-export interface StoreReadSettings {
- /**
- * Polymorphic Discriminator
- */
- type: "StoreReadSettings";
- /**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
- */
- maxConcurrentConnections?: any;
+export interface SapHanaPartitionSettings {
/**
- * Describes unknown properties. The value of an unknown property can be of "any" type.
+ * The name of the column that will be used for proceeding range partitioning. Type: string (or
+ * Expression with resultType string).
*/
- [property: string]: any;
+ partitionColumnName?: any;
}
/**
- * HDFS read settings.
+ * A copy activity source for SAP HANA source.
*/
-export interface HdfsReadSettings {
+export interface SapHanaSource {
/**
* Polymorphic Discriminator
*/
- type: "HdfsReadSettings";
+ type: "SapHanaSource";
/**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- maxConcurrentConnections?: any;
+ sourceRetryCount?: any;
/**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- recursive?: any;
+ sourceRetryWait?: any;
/**
- * HDFS wildcardFolderPath. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- wildcardFolderPath?: any;
+ maxConcurrentConnections?: any;
/**
- * HDFS wildcardFileName. Type: string (or Expression with resultType string).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- wildcardFileName?: any;
+ queryTimeout?: any;
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- fileListPath?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * Indicates whether to enable partition discovery.
+ * SAP HANA Sql query. Type: string (or Expression with resultType string).
*/
- enablePartitionDiscovery?: boolean;
+ query?: any;
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * The packet size of data read from SAP HANA. Type: integer (or Expression with resultType
+ * integer).
*/
- modifiedDatetimeStart?: any;
+ packetSize?: any;
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * The partition mechanism that will be used for SAP HANA read in parallel. Possible values
+ * include: 'None', 'PhysicalPartitionsOfTable', 'SapHanaDynamicRange'
*/
- modifiedDatetimeEnd?: any;
+ partitionOption?: SapHanaPartitionOption;
/**
- * Specifies Distcp-related settings.
+ * The settings that will be leveraged for SAP HANA source partitioning.
*/
- distcpSettings?: DistcpSettings;
+ partitionSettings?: SapHanaPartitionSettings;
}
/**
- * Sftp read settings.
+ * A copy activity source for SAP ECC source.
*/
-export interface HttpReadSettings {
+export interface SapEccSource {
/**
* Polymorphic Discriminator
*/
- type: "HttpReadSettings";
+ type: "SapEccSource";
+ /**
+ * Source retry count. Type: integer (or Expression with resultType integer).
+ */
+ sourceRetryCount?: any;
+ /**
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ sourceRetryWait?: any;
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
/**
- * The HTTP method used to call the RESTful API. The default is GET. Type: string (or Expression
- * with resultType string).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- requestMethod?: any;
+ queryTimeout?: any;
/**
- * The HTTP request body to the RESTful API if requestMethod is POST. Type: string (or Expression
- * with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- requestBody?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * The additional HTTP headers in the request to the RESTful API. Type: string (or Expression
- * with resultType string).
+ * SAP ECC OData query. For example, "$top=1". Type: string (or Expression with resultType
+ * string).
*/
- additionalHeaders?: any;
+ query?: any;
/**
- * Specifies the timeout for a HTTP client to get HTTP response from HTTP server.
+ * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the
+ * timeout to read response data. Default value: 00:05:00. Type: string (or Expression with
+ * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- requestTimeout?: any;
+ httpRequestTimeout?: any;
}
/**
- * Sftp read settings.
+ * A copy activity source for SAP Cloud for Customer source.
*/
-export interface SftpReadSettings {
+export interface SapCloudForCustomerSource {
/**
* Polymorphic Discriminator
*/
- type: "SftpReadSettings";
+ type: "SapCloudForCustomerSource";
/**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- maxConcurrentConnections?: any;
+ sourceRetryCount?: any;
/**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- recursive?: any;
+ sourceRetryWait?: any;
/**
- * Sftp wildcardFolderPath. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- wildcardFolderPath?: any;
+ maxConcurrentConnections?: any;
/**
- * Sftp wildcardFileName. Type: string (or Expression with resultType string).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- wildcardFileName?: any;
+ queryTimeout?: any;
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- fileListPath?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * SAP Cloud for Customer OData query. For example, "$top=1". Type: string (or Expression with
+ * resultType string).
*/
- modifiedDatetimeStart?: any;
+ query?: any;
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the
+ * timeout to read response data. Default value: 00:05:00. Type: string (or Expression with
+ * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- modifiedDatetimeEnd?: any;
+ httpRequestTimeout?: any;
}
/**
- * Ftp read settings.
+ * A copy activity Salesforce source.
*/
-export interface FtpReadSettings {
+export interface SalesforceSource {
/**
* Polymorphic Discriminator
*/
- type: "FtpReadSettings";
+ type: "SalesforceSource";
+ /**
+ * Source retry count. Type: integer (or Expression with resultType integer).
+ */
+ sourceRetryCount?: any;
+ /**
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ sourceRetryWait?: any;
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
/**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- recursive?: any;
- /**
- * Ftp wildcardFolderPath. Type: string (or Expression with resultType string).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- wildcardFolderPath?: any;
+ queryTimeout?: any;
/**
- * Ftp wildcardFileName. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- wildcardFileName?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * Database query. Type: string (or Expression with resultType string).
*/
- fileListPath?: any;
+ query?: any;
/**
- * Specify whether to use binary transfer mode for FTP stores.
+ * The read behavior for the operation. Default is Query. Possible values include: 'Query',
+ * 'QueryAll'
*/
- useBinaryTransfer?: boolean;
+ readBehavior?: SalesforceSourceReadBehavior;
}
/**
- * Google Cloud Storage read settings.
+ * A copy activity source for SapBW server via MDX.
*/
-export interface GoogleCloudStorageReadSettings {
+export interface SapBwSource {
/**
* Polymorphic Discriminator
*/
- type: "GoogleCloudStorageReadSettings";
- /**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
- */
- maxConcurrentConnections?: any;
- /**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- recursive?: any;
- /**
- * Google Cloud Storage wildcardFolderPath. Type: string (or Expression with resultType string).
- */
- wildcardFolderPath?: any;
+ type: "SapBwSource";
/**
- * Google Cloud Storage wildcardFileName. Type: string (or Expression with resultType string).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- wildcardFileName?: any;
+ sourceRetryCount?: any;
/**
- * The prefix filter for the Google Cloud Storage object name. Type: string (or Expression with
- * resultType string).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- prefix?: any;
+ sourceRetryWait?: any;
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- fileListPath?: any;
+ maxConcurrentConnections?: any;
/**
- * Indicates whether to enable partition discovery.
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- enablePartitionDiscovery?: boolean;
+ queryTimeout?: any;
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- modifiedDatetimeStart?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * MDX query. Type: string (or Expression with resultType string).
*/
- modifiedDatetimeEnd?: any;
+ query?: any;
}
/**
- * Azure File Storage read settings.
+ * A copy activity source for Sybase databases.
*/
-export interface AzureFileStorageReadSettings {
+export interface SybaseSource {
/**
* Polymorphic Discriminator
*/
- type: "AzureFileStorageReadSettings";
- /**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
- */
- maxConcurrentConnections?: any;
- /**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- recursive?: any;
+ type: "SybaseSource";
/**
- * Azure File Storage wildcardFolderPath. Type: string (or Expression with resultType string).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- wildcardFolderPath?: any;
+ sourceRetryCount?: any;
/**
- * Azure File Storage wildcardFileName. Type: string (or Expression with resultType string).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- wildcardFileName?: any;
+ sourceRetryWait?: any;
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- fileListPath?: any;
+ maxConcurrentConnections?: any;
/**
- * Indicates whether to enable partition discovery.
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- enablePartitionDiscovery?: boolean;
+ queryTimeout?: any;
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- modifiedDatetimeStart?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * Database query. Type: string (or Expression with resultType string).
*/
- modifiedDatetimeEnd?: any;
+ query?: any;
}
/**
- * File server read settings.
+ * A copy activity source for PostgreSQL databases.
*/
-export interface FileServerReadSettings {
+export interface PostgreSqlSource {
/**
* Polymorphic Discriminator
*/
- type: "FileServerReadSettings";
- /**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
- */
- maxConcurrentConnections?: any;
- /**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- recursive?: any;
+ type: "PostgreSqlSource";
/**
- * FileServer wildcardFolderPath. Type: string (or Expression with resultType string).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- wildcardFolderPath?: any;
+ sourceRetryCount?: any;
/**
- * FileServer wildcardFileName. Type: string (or Expression with resultType string).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- wildcardFileName?: any;
+ sourceRetryWait?: any;
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- fileListPath?: any;
+ maxConcurrentConnections?: any;
/**
- * Indicates whether to enable partition discovery.
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- enablePartitionDiscovery?: boolean;
+ queryTimeout?: any;
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- modifiedDatetimeStart?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * Database query. Type: string (or Expression with resultType string).
*/
- modifiedDatetimeEnd?: any;
+ query?: any;
}
/**
- * Azure data lake store read settings.
+ * A copy activity source for MySQL databases.
*/
-export interface AmazonS3ReadSettings {
+export interface MySqlSource {
/**
* Polymorphic Discriminator
*/
- type: "AmazonS3ReadSettings";
+ type: "MySqlSource";
+ /**
+ * Source retry count. Type: integer (or Expression with resultType integer).
+ */
+ sourceRetryCount?: any;
+ /**
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ sourceRetryWait?: any;
/**
* The maximum concurrent connection count for the source data store. Type: integer (or
* Expression with resultType integer).
*/
maxConcurrentConnections?: any;
/**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- recursive?: any;
+ queryTimeout?: any;
/**
- * AmazonS3 wildcardFolderPath. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- wildcardFolderPath?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * AmazonS3 wildcardFileName. Type: string (or Expression with resultType string).
+ * Database query. Type: string (or Expression with resultType string).
*/
- wildcardFileName?: any;
+ query?: any;
+}
+
+/**
+ * A copy activity source for ODBC databases.
+ */
+export interface OdbcSource {
/**
- * The prefix filter for the S3 object name. Type: string (or Expression with resultType string).
+ * Polymorphic Discriminator
*/
- prefix?: any;
+ type: "OdbcSource";
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- fileListPath?: any;
+ sourceRetryCount?: any;
/**
- * Indicates whether to enable partition discovery.
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- enablePartitionDiscovery?: boolean;
+ sourceRetryWait?: any;
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- modifiedDatetimeStart?: any;
+ maxConcurrentConnections?: any;
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- modifiedDatetimeEnd?: any;
+ queryTimeout?: any;
+ /**
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
+ */
+ additionalColumns?: AdditionalColumns[];
+ /**
+ * Database query. Type: string (or Expression with resultType string).
+ */
+ query?: any;
}
/**
- * Azure data lake store read settings.
+ * A copy activity source for Db2 databases.
*/
-export interface AzureDataLakeStoreReadSettings {
+export interface Db2Source {
/**
* Polymorphic Discriminator
*/
- type: "AzureDataLakeStoreReadSettings";
- /**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
- */
- maxConcurrentConnections?: any;
- /**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- recursive?: any;
+ type: "Db2Source";
/**
- * ADLS wildcardFolderPath. Type: string (or Expression with resultType string).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- wildcardFolderPath?: any;
+ sourceRetryCount?: any;
/**
- * ADLS wildcardFileName. Type: string (or Expression with resultType string).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- wildcardFileName?: any;
+ sourceRetryWait?: any;
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- fileListPath?: any;
+ maxConcurrentConnections?: any;
/**
- * Indicates whether to enable partition discovery.
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- enablePartitionDiscovery?: boolean;
+ queryTimeout?: any;
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- modifiedDatetimeStart?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * Database query. Type: string (or Expression with resultType string).
*/
- modifiedDatetimeEnd?: any;
+ query?: any;
}
/**
- * Azure blobFS read settings.
+ * A copy activity source for Informix.
*/
-export interface AzureBlobFSReadSettings {
+export interface InformixSource {
/**
* Polymorphic Discriminator
*/
- type: "AzureBlobFSReadSettings";
- /**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
- */
- maxConcurrentConnections?: any;
- /**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- recursive?: any;
+ type: "InformixSource";
/**
- * Azure blobFS wildcardFolderPath. Type: string (or Expression with resultType string).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- wildcardFolderPath?: any;
+ sourceRetryCount?: any;
/**
- * Azure blobFS wildcardFileName. Type: string (or Expression with resultType string).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- wildcardFileName?: any;
+ sourceRetryWait?: any;
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- fileListPath?: any;
+ maxConcurrentConnections?: any;
/**
- * Indicates whether to enable partition discovery.
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- enablePartitionDiscovery?: boolean;
+ queryTimeout?: any;
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- modifiedDatetimeStart?: any;
+ additionalColumns?: AdditionalColumns[];
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * Database query. Type: string (or Expression with resultType string).
*/
- modifiedDatetimeEnd?: any;
+ query?: any;
}
/**
- * Azure blob read settings.
+ * A copy activity Azure Table source.
*/
-export interface AzureBlobStorageReadSettings {
+export interface AzureTableSource {
/**
* Polymorphic Discriminator
*/
- type: "AzureBlobStorageReadSettings";
- /**
- * The maximum concurrent connection count for the source data store. Type: integer (or
- * Expression with resultType integer).
- */
- maxConcurrentConnections?: any;
- /**
- * If true, files under the folder path will be read recursively. Default is true. Type: boolean
- * (or Expression with resultType boolean).
- */
- recursive?: any;
+ type: "AzureTableSource";
/**
- * Azure blob wildcardFolderPath. Type: string (or Expression with resultType string).
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- wildcardFolderPath?: any;
+ sourceRetryCount?: any;
/**
- * Azure blob wildcardFileName. Type: string (or Expression with resultType string).
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- wildcardFileName?: any;
+ sourceRetryWait?: any;
/**
- * The prefix filter for the Azure Blob name. Type: string (or Expression with resultType
- * string).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- prefix?: any;
+ maxConcurrentConnections?: any;
/**
- * Point to a text file that lists each file (relative path to the path configured in the
- * dataset) that you want to copy. Type: string (or Expression with resultType string).
+ * Query timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- fileListPath?: any;
+ queryTimeout?: any;
/**
- * Indicates whether to enable partition discovery.
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
*/
- enablePartitionDiscovery?: boolean;
+ additionalColumns?: AdditionalColumns[];
/**
- * The start of file's modified datetime. Type: string (or Expression with resultType string).
+ * Azure Table source query. Type: string (or Expression with resultType string).
*/
- modifiedDatetimeStart?: any;
+ azureTableSourceQuery?: any;
/**
- * The end of file's modified datetime. Type: string (or Expression with resultType string).
+ * Azure Table source ignore table not found. Type: boolean (or Expression with resultType
+ * boolean).
*/
- modifiedDatetimeEnd?: any;
+ azureTableSourceIgnoreTableNotFound?: any;
}
/**
@@ -17685,6 +18378,10 @@ export interface BinarySource {
* Binary store settings.
*/
storeSettings?: StoreReadSettingsUnion;
+ /**
+ * Binary format settings.
+ */
+ formatSettings?: BinaryReadSettings;
}
/**
@@ -17721,13 +18418,13 @@ export interface OrcSource {
}
/**
- * A copy activity Json source.
+ * A copy activity Xml source.
*/
-export interface JsonSource {
+export interface XmlSource {
/**
* Polymorphic Discriminator
*/
- type: "JsonSource";
+ type: "XmlSource";
/**
* Source retry count. Type: integer (or Expression with resultType integer).
*/
@@ -17743,9 +18440,13 @@ export interface JsonSource {
*/
maxConcurrentConnections?: any;
/**
- * Json store settings.
+ * Xml store settings.
*/
storeSettings?: StoreReadSettingsUnion;
+ /**
+ * Xml format settings.
+ */
+ formatSettings?: XmlReadSettings;
/**
* Specifies the additional columns to be added to source data. Type: array of objects (or
* Expression with resultType array of objects).
@@ -17754,37 +18455,40 @@ export interface JsonSource {
}
/**
- * Contains the possible cases for FormatReadSettings.
- */
-export type FormatReadSettingsUnion = FormatReadSettings | DelimitedTextReadSettings;
-
-/**
- * Format read settings.
+ * A copy activity Json source.
*/
-export interface FormatReadSettings {
+export interface JsonSource {
/**
* Polymorphic Discriminator
*/
- type: "FormatReadSettings";
+ type: "JsonSource";
/**
- * Describes unknown properties. The value of an unknown property can be of "any" type.
+ * Source retry count. Type: integer (or Expression with resultType integer).
*/
- [property: string]: any;
-}
-
-/**
- * Delimited text read settings.
- */
-export interface DelimitedTextReadSettings {
+ sourceRetryCount?: any;
/**
- * Polymorphic Discriminator
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
*/
- type: "DelimitedTextReadSettings";
+ sourceRetryWait?: any;
/**
- * Indicates the number of non-empty rows to skip when reading data from input files. Type:
- * integer (or Expression with resultType integer).
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
*/
- skipLineCount?: any;
+ maxConcurrentConnections?: any;
+ /**
+ * Json store settings.
+ */
+ storeSettings?: StoreReadSettingsUnion;
+ /**
+ * Json format settings.
+ */
+ formatSettings?: JsonReadSettings;
+ /**
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
+ */
+ additionalColumns?: AdditionalColumns[];
}
/**
@@ -17857,6 +18561,39 @@ export interface ParquetSource {
additionalColumns?: AdditionalColumns[];
}
+/**
+ * A copy activity Excel source.
+ */
+export interface ExcelSource {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "ExcelSource";
+ /**
+ * Source retry count. Type: integer (or Expression with resultType integer).
+ */
+ sourceRetryCount?: any;
+ /**
+ * Source retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ sourceRetryWait?: any;
+ /**
+ * The maximum concurrent connection count for the source data store. Type: integer (or
+ * Expression with resultType integer).
+ */
+ maxConcurrentConnections?: any;
+ /**
+ * Excel store settings.
+ */
+ storeSettings?: StoreReadSettingsUnion;
+ /**
+ * Specifies the additional columns to be added to source data. Type: array of objects (or
+ * Expression with resultType array of objects).
+ */
+ additionalColumns?: AdditionalColumns[];
+}
+
/**
* A copy activity Avro source.
*/
@@ -18054,6 +18791,10 @@ export interface DeleteActivity {
* Delete activity dataset reference.
*/
dataset: DatasetReference;
+ /**
+ * Delete activity store settings.
+ */
+ storeSettings?: StoreReadSettingsUnion;
}
/**
@@ -18286,7 +19027,8 @@ export interface SSISPackageLocation {
*/
packagePath?: any;
/**
- * The type of SSIS package location. Possible values include: 'SSISDB', 'File', 'InlinePackage'
+ * The type of SSIS package location. Possible values include: 'SSISDB', 'File', 'InlinePackage',
+ * 'PackageStore'
*/
type?: SsisPackageLocationType;
/**
@@ -18302,6 +19044,10 @@ export interface SSISPackageLocation {
* string).
*/
configurationPath?: any;
+ /**
+ * The configuration file access credential.
+ */
+ configurationAccessCredential?: SSISAccessCredential;
/**
* The package name.
*/
@@ -18813,7 +19559,7 @@ export interface StagingSettings {
/**
* Contains the possible cases for CopySink.
*/
-export type CopySinkUnion = CopySink | CosmosDbMongoDbApiSink | SalesforceServiceCloudSink | SalesforceSink | AzureDataExplorerSink | CommonDataServiceForAppsSink | DynamicsCrmSink | DynamicsSink | MicrosoftAccessSink | InformixSink | OdbcSink | AzureSearchIndexSink | AzureBlobFSSink | AzureDataLakeStoreSink | OracleSink | SqlDWSink | SqlMISink | AzureSqlSink | SqlServerSink | SqlSink | CosmosDbSqlApiSink | DocumentDbCollectionSink | FileSystemSink | BlobSink | BinarySink | ParquetSink | AvroSink | AzureTableSink | AzureQueueSink | SapCloudForCustomerSink | AzureMySqlSink | AzurePostgreSqlSink | OrcSink | JsonSink | DelimitedTextSink;
+export type CopySinkUnion = CopySink | CosmosDbMongoDbApiSink | SalesforceServiceCloudSink | SalesforceSink | AzureDataExplorerSink | CommonDataServiceForAppsSink | DynamicsCrmSink | DynamicsSink | MicrosoftAccessSink | InformixSink | OdbcSink | AzureSearchIndexSink | AzureBlobFSSink | AzureDataLakeStoreSink | OracleSink | SnowflakeSink | SqlDWSink | SqlMISink | AzureSqlSink | SqlServerSink | SqlSink | CosmosDbSqlApiSink | DocumentDbCollectionSink | FileSystemSink | BlobSink | BinarySink | ParquetSink | AvroSink | AzureTableSink | AzureQueueSink | SapCloudForCustomerSink | AzureMySqlSink | AzurePostgreSqlSink | OrcSink | JsonSink | DelimitedTextSink;
/**
* A copy activity sink.
@@ -19438,6 +20184,88 @@ export interface OracleSink {
preCopyScript?: any;
}
+/**
+ * Contains the possible cases for ImportSettings.
+ */
+export type ImportSettingsUnion = ImportSettings | SnowflakeImportCopyCommand;
+
+/**
+ * Import command settings.
+ */
+export interface ImportSettings {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "ImportSettings";
+ /**
+ * Describes unknown properties. The value of an unknown property can be of "any" type.
+ */
+ [property: string]: any;
+}
+
+/**
+ * Snowflake import command settings.
+ */
+export interface SnowflakeImportCopyCommand {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "SnowflakeImportCopyCommand";
+ /**
+ * Additional copy options directly passed to snowflake Copy Command. Type: key value pairs
+ * (value should be string type) (or Expression with resultType object). Example:
+ * "additionalCopyOptions": { "DATE_FORMAT": "MM/DD/YYYY", "TIME_FORMAT": "'HH24:MI:SS.FF'" }
+ */
+ additionalCopyOptions?: { [propertyName: string]: any };
+ /**
+ * Additional format options directly passed to snowflake Copy Command. Type: key value pairs
+ * (value should be string type) (or Expression with resultType object). Example:
+ * "additionalFormatOptions": { "FORCE": "TRUE", "LOAD_UNCERTAIN_FILES": "'FALSE'" }
+ */
+ additionalFormatOptions?: { [propertyName: string]: any };
+}
+
+/**
+ * A copy activity Snowflake sink.
+ */
+export interface SnowflakeSink {
+ /**
+ * Polymorphic Discriminator
+ */
+ type: "SnowflakeSink";
+ /**
+ * Write batch size. Type: integer (or Expression with resultType integer), minimum: 0.
+ */
+ writeBatchSize?: any;
+ /**
+ * Write batch timeout. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ writeBatchTimeout?: any;
+ /**
+ * Sink retry count. Type: integer (or Expression with resultType integer).
+ */
+ sinkRetryCount?: any;
+ /**
+ * Sink retry wait. Type: string (or Expression with resultType string), pattern:
+ * ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ sinkRetryWait?: any;
+ /**
+ * The maximum concurrent connection count for the sink data store. Type: integer (or Expression
+ * with resultType integer).
+ */
+ maxConcurrentConnections?: any;
+ /**
+ * SQL pre-copy script. Type: string (or Expression with resultType string).
+ */
+ preCopyScript?: any;
+ /**
+ * Snowflake import settings.
+ */
+ importSettings?: SnowflakeImportCopyCommand;
+}
+
/**
* Default value.
*/
@@ -20418,6 +21246,12 @@ export interface SapCloudForCustomerSink {
* 'Update'
*/
writeBehavior?: SapCloudForCustomerSinkWriteBehavior;
+ /**
+ * The timeout (TimeSpan) to get an HTTP response. It is the timeout to get a response, not the
+ * timeout to read response data. Default value: 00:05:00. Type: string (or Expression with
+ * resultType string), pattern: ((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])).
+ */
+ httpRequestTimeout?: any;
}
/**
@@ -21648,6 +22482,35 @@ export interface SelfHostedIntegrationRuntime {
linkedInfo?: LinkedIntegrationRuntimeTypeUnion;
}
+/**
+ * The entity reference.
+ */
+export interface EntityReference {
+ /**
+ * The type of this referenced entity. Possible values include: 'IntegrationRuntimeReference',
+ * 'LinkedServiceReference'
+ */
+ type?: IntegrationRuntimeEntityReferenceType;
+ /**
+ * The name of this referenced entity.
+ */
+ referenceName?: string;
+}
+
+/**
+ * Package store for the SSIS integration runtime.
+ */
+export interface PackageStore {
+ /**
+ * The name of the package store.
+ */
+ name: string;
+ /**
+ * The package store linked service reference.
+ */
+ packageStoreLinkedService: EntityReference;
+}
+
/**
* Contains the possible cases for CustomSetupBase.
*/
@@ -21721,21 +22584,6 @@ export interface CmdkeySetup {
password: SecretBaseUnion;
}
-/**
- * The entity reference.
- */
-export interface EntityReference {
- /**
- * The type of this referenced entity. Possible values include: 'IntegrationRuntimeReference',
- * 'LinkedServiceReference'
- */
- type?: IntegrationRuntimeEntityReferenceType;
- /**
- * The name of this referenced entity.
- */
- referenceName?: string;
-}
-
/**
* Data proxy properties for a managed dedicated integration runtime.
*/
@@ -21826,6 +22674,10 @@ export interface IntegrationRuntimeSsisProperties {
* Custom setup without script properties for a SSIS integration runtime.
*/
expressCustomSetupProperties?: CustomSetupBaseUnion[];
+ /**
+ * Package stores for the SSIS Integration Runtime.
+ */
+ packageStores?: PackageStore[];
/**
* Describes unknown properties. The value of an unknown property can be of "any" type.
*/
@@ -23174,11 +24026,11 @@ export type SapHanaPartitionOption = 'None' | 'PhysicalPartitionsOfTable' | 'Sap
/**
* Defines values for SsisPackageLocationType.
- * Possible values include: 'SSISDB', 'File', 'InlinePackage'
+ * Possible values include: 'SSISDB', 'File', 'InlinePackage', 'PackageStore'
* @readonly
* @enum {string}
*/
-export type SsisPackageLocationType = 'SSISDB' | 'File' | 'InlinePackage';
+export type SsisPackageLocationType = 'SSISDB' | 'File' | 'InlinePackage' | 'PackageStore';
/**
* Defines values for HDInsightActivityDebugInfoOption.
diff --git a/sdk/datafactory/arm-datafactory/src/models/integrationRuntimesMappers.ts b/sdk/datafactory/arm-datafactory/src/models/integrationRuntimesMappers.ts
index 316683c70dbc..bb77a6df4138 100644
--- a/sdk/datafactory/arm-datafactory/src/models/integrationRuntimesMappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/integrationRuntimesMappers.ts
@@ -98,6 +98,7 @@ export {
AzureTableStorageLinkedService,
BaseResource,
BinaryDataset,
+ BinaryReadSettings,
BinarySink,
BinarySource,
BlobEventsTrigger,
@@ -115,6 +116,7 @@ export {
CommonDataServiceForAppsSink,
CommonDataServiceForAppsSource,
ComponentSetup,
+ CompressionReadSettings,
ConcurLinkedService,
ConcurObjectDataset,
ConcurSource,
@@ -196,11 +198,14 @@ export {
EloquaSource,
EntityReference,
EnvironmentVariableSetup,
+ ExcelDataset,
+ ExcelSource,
ExecuteDataFlowActivity,
ExecuteDataFlowActivityTypePropertiesCompute,
ExecutePipelineActivity,
ExecuteSSISPackageActivity,
ExecutionActivity,
+ ExportSettings,
Expression,
Factory,
FactoryGitHubConfiguration,
@@ -263,6 +268,7 @@ export {
ImpalaLinkedService,
ImpalaObjectDataset,
ImpalaSource,
+ ImportSettings,
InformixLinkedService,
InformixSink,
InformixSource,
@@ -290,6 +296,7 @@ export {
JiraSource,
JsonDataset,
JsonFormat,
+ JsonReadSettings,
JsonSink,
JsonSource,
JsonWriteSettings,
@@ -359,6 +366,7 @@ export {
OrcFormat,
OrcSink,
OrcSource,
+ PackageStore,
ParameterSpecification,
ParquetDataset,
ParquetFormat,
@@ -447,10 +455,19 @@ export {
SftpReadSettings,
SftpServerLinkedService,
SftpWriteSettings,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
+ SharePointOnlineListSource,
ShopifyLinkedService,
ShopifyObjectDataset,
ShopifySource,
SkipErrorFile,
+ SnowflakeDataset,
+ SnowflakeExportCopyCommand,
+ SnowflakeImportCopyCommand,
+ SnowflakeLinkedService,
+ SnowflakeSink,
+ SnowflakeSource,
SparkLinkedService,
SparkObjectDataset,
SparkSource,
@@ -521,6 +538,10 @@ export {
XeroLinkedService,
XeroObjectDataset,
XeroSource,
+ XmlDataset,
+ XmlReadSettings,
+ XmlSource,
+ ZipDeflateReadSettings,
ZohoLinkedService,
ZohoObjectDataset,
ZohoSource
diff --git a/sdk/datafactory/arm-datafactory/src/models/linkedServicesMappers.ts b/sdk/datafactory/arm-datafactory/src/models/linkedServicesMappers.ts
index a429fa7af963..ee5725307030 100644
--- a/sdk/datafactory/arm-datafactory/src/models/linkedServicesMappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/linkedServicesMappers.ts
@@ -98,6 +98,7 @@ export {
AzureTableStorageLinkedService,
BaseResource,
BinaryDataset,
+ BinaryReadSettings,
BinarySink,
BinarySource,
BlobEventsTrigger,
@@ -115,6 +116,7 @@ export {
CommonDataServiceForAppsSink,
CommonDataServiceForAppsSource,
ComponentSetup,
+ CompressionReadSettings,
ConcurLinkedService,
ConcurObjectDataset,
ConcurSource,
@@ -195,11 +197,14 @@ export {
EloquaSource,
EntityReference,
EnvironmentVariableSetup,
+ ExcelDataset,
+ ExcelSource,
ExecuteDataFlowActivity,
ExecuteDataFlowActivityTypePropertiesCompute,
ExecutePipelineActivity,
ExecuteSSISPackageActivity,
ExecutionActivity,
+ ExportSettings,
Expression,
Factory,
FactoryGitHubConfiguration,
@@ -262,6 +267,7 @@ export {
ImpalaLinkedService,
ImpalaObjectDataset,
ImpalaSource,
+ ImportSettings,
InformixLinkedService,
InformixSink,
InformixSource,
@@ -281,6 +287,7 @@ export {
JiraSource,
JsonDataset,
JsonFormat,
+ JsonReadSettings,
JsonSink,
JsonSource,
JsonWriteSettings,
@@ -345,6 +352,7 @@ export {
OrcFormat,
OrcSink,
OrcSource,
+ PackageStore,
ParameterSpecification,
ParquetDataset,
ParquetFormat,
@@ -431,10 +439,19 @@ export {
SftpReadSettings,
SftpServerLinkedService,
SftpWriteSettings,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
+ SharePointOnlineListSource,
ShopifyLinkedService,
ShopifyObjectDataset,
ShopifySource,
SkipErrorFile,
+ SnowflakeDataset,
+ SnowflakeExportCopyCommand,
+ SnowflakeImportCopyCommand,
+ SnowflakeLinkedService,
+ SnowflakeSink,
+ SnowflakeSource,
SparkLinkedService,
SparkObjectDataset,
SparkSource,
@@ -504,6 +521,10 @@ export {
XeroLinkedService,
XeroObjectDataset,
XeroSource,
+ XmlDataset,
+ XmlReadSettings,
+ XmlSource,
+ ZipDeflateReadSettings,
ZohoLinkedService,
ZohoObjectDataset,
ZohoSource
diff --git a/sdk/datafactory/arm-datafactory/src/models/mappers.ts b/sdk/datafactory/arm-datafactory/src/models/mappers.ts
index 5ceddc7199c9..42c68df96c8a 100644
--- a/sdk/datafactory/arm-datafactory/src/models/mappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/mappers.ts
@@ -3277,6 +3277,20 @@ export const DataFlowSink: msRest.CompositeMapper = {
name: "Composite",
className: "DatasetReference"
}
+ },
+ linkedService: {
+ serializedName: "linkedService",
+ type: {
+ name: "Composite",
+ className: "LinkedServiceReference"
+ }
+ },
+ schemaLinkedService: {
+ serializedName: "schemaLinkedService",
+ type: {
+ name: "Composite",
+ className: "LinkedServiceReference"
+ }
}
}
}
@@ -3295,6 +3309,20 @@ export const DataFlowSource: msRest.CompositeMapper = {
name: "Composite",
className: "DatasetReference"
}
+ },
+ linkedService: {
+ serializedName: "linkedService",
+ type: {
+ name: "Composite",
+ className: "LinkedServiceReference"
+ }
+ },
+ schemaLinkedService: {
+ serializedName: "schemaLinkedService",
+ type: {
+ name: "Composite",
+ className: "LinkedServiceReference"
+ }
}
}
}
@@ -3355,6 +3383,89 @@ export const MappingDataFlow: msRest.CompositeMapper = {
}
};
+export const SharePointOnlineListLinkedService: msRest.CompositeMapper = {
+ serializedName: "SharePointOnlineList",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator,
+ uberParent: "LinkedService",
+ className: "SharePointOnlineListLinkedService",
+ modelProperties: {
+ ...LinkedService.type.modelProperties,
+ siteUrl: {
+ required: true,
+ serializedName: "typeProperties.siteUrl",
+ type: {
+ name: "Object"
+ }
+ },
+ tenantId: {
+ required: true,
+ serializedName: "typeProperties.tenantId",
+ type: {
+ name: "Object"
+ }
+ },
+ servicePrincipalId: {
+ required: true,
+ serializedName: "typeProperties.servicePrincipalId",
+ type: {
+ name: "Object"
+ }
+ },
+ servicePrincipalKey: {
+ required: true,
+ serializedName: "typeProperties.servicePrincipalKey",
+ type: {
+ name: "Composite",
+ className: "SecretBase"
+ }
+ },
+ encryptedCredential: {
+ serializedName: "typeProperties.encryptedCredential",
+ type: {
+ name: "Object"
+ }
+ }
+ },
+ additionalProperties: LinkedService.type.additionalProperties
+ }
+};
+
+export const SnowflakeLinkedService: msRest.CompositeMapper = {
+ serializedName: "Snowflake",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: LinkedService.type.polymorphicDiscriminator,
+ uberParent: "LinkedService",
+ className: "SnowflakeLinkedService",
+ modelProperties: {
+ ...LinkedService.type.modelProperties,
+ connectionString: {
+ required: true,
+ serializedName: "typeProperties.connectionString",
+ type: {
+ name: "Object"
+ }
+ },
+ password: {
+ serializedName: "typeProperties.password",
+ type: {
+ name: "Composite",
+ className: "AzureKeyVaultSecretReference"
+ }
+ },
+ encryptedCredential: {
+ serializedName: "typeProperties.encryptedCredential",
+ type: {
+ name: "Object"
+ }
+ }
+ },
+ additionalProperties: LinkedService.type.additionalProperties
+ }
+};
+
export const AzureFunctionLinkedService: msRest.CompositeMapper = {
serializedName: "AzureFunction",
type: {
@@ -6144,7 +6255,6 @@ export const SapHanaLinkedService: msRest.CompositeMapper = {
}
},
server: {
- required: true,
serializedName: "typeProperties.server",
type: {
name: "Object"
@@ -8172,7 +8282,6 @@ export const AzureFileStorageLinkedService: msRest.CompositeMapper = {
modelProperties: {
...LinkedService.type.modelProperties,
host: {
- required: true,
serializedName: "typeProperties.host",
type: {
name: "Object"
@@ -8191,6 +8300,38 @@ export const AzureFileStorageLinkedService: msRest.CompositeMapper = {
className: "SecretBase"
}
},
+ connectionString: {
+ serializedName: "typeProperties.connectionString",
+ type: {
+ name: "Object"
+ }
+ },
+ accountKey: {
+ serializedName: "typeProperties.accountKey",
+ type: {
+ name: "Composite",
+ className: "AzureKeyVaultSecretReference"
+ }
+ },
+ sasUri: {
+ serializedName: "typeProperties.sasUri",
+ type: {
+ name: "Object"
+ }
+ },
+ sasToken: {
+ serializedName: "typeProperties.sasToken",
+ type: {
+ name: "Composite",
+ className: "AzureKeyVaultSecretReference"
+ }
+ },
+ fileShare: {
+ serializedName: "typeProperties.fileShare",
+ type: {
+ name: "Object"
+ }
+ },
encryptedCredential: {
serializedName: "typeProperties.encryptedCredential",
type: {
@@ -9063,6 +9204,52 @@ export const AzureStorageLinkedService: msRest.CompositeMapper = {
}
};
+export const SharePointOnlineListResourceDataset: msRest.CompositeMapper = {
+ serializedName: "SharePointOnlineListResource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator,
+ uberParent: "Dataset",
+ className: "SharePointOnlineListResourceDataset",
+ modelProperties: {
+ ...Dataset.type.modelProperties,
+ listName: {
+ serializedName: "typeProperties.listName",
+ type: {
+ name: "Object"
+ }
+ }
+ },
+ additionalProperties: Dataset.type.additionalProperties
+ }
+};
+
+export const SnowflakeDataset: msRest.CompositeMapper = {
+ serializedName: "SnowflakeTable",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator,
+ uberParent: "Dataset",
+ className: "SnowflakeDataset",
+ modelProperties: {
+ ...Dataset.type.modelProperties,
+ snowflakeDatasetSchema: {
+ serializedName: "typeProperties.schema",
+ type: {
+ name: "Object"
+ }
+ },
+ table: {
+ serializedName: "typeProperties.table",
+ type: {
+ name: "Object"
+ }
+ }
+ },
+ additionalProperties: Dataset.type.additionalProperties
+ }
+};
+
export const GoogleAdWordsObjectDataset: msRest.CompositeMapper = {
serializedName: "GoogleAdWordsObject",
type: {
@@ -11770,6 +11957,57 @@ export const OrcDataset: msRest.CompositeMapper = {
}
};
+export const XmlDataset: msRest.CompositeMapper = {
+ serializedName: "Xml",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator,
+ uberParent: "Dataset",
+ className: "XmlDataset",
+ modelProperties: {
+ ...Dataset.type.modelProperties,
+ location: {
+ required: true,
+ serializedName: "typeProperties.location",
+ type: {
+ name: "Composite",
+ className: "DatasetLocation",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ encodingName: {
+ serializedName: "typeProperties.encodingName",
+ type: {
+ name: "Object"
+ }
+ },
+ nullValue: {
+ serializedName: "typeProperties.nullValue",
+ type: {
+ name: "Object"
+ }
+ },
+ compression: {
+ serializedName: "typeProperties.compression",
+ type: {
+ name: "Composite",
+ className: "DatasetCompression",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ }
+ },
+ additionalProperties: Dataset.type.additionalProperties
+ }
+};
+
export const JsonDataset: msRest.CompositeMapper = {
serializedName: "Json",
type: {
@@ -11929,13 +12167,13 @@ export const ParquetDataset: msRest.CompositeMapper = {
}
};
-export const AvroDataset: msRest.CompositeMapper = {
- serializedName: "Avro",
+export const ExcelDataset: msRest.CompositeMapper = {
+ serializedName: "Excel",
type: {
name: "Composite",
polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator,
uberParent: "Dataset",
- className: "AvroDataset",
+ className: "ExcelDataset",
modelProperties: {
...Dataset.type.modelProperties,
location: {
@@ -11951,51 +12189,115 @@ export const AvroDataset: msRest.CompositeMapper = {
}
}
},
- avroCompressionCodec: {
- serializedName: "typeProperties.avroCompressionCodec",
- type: {
- name: "String"
- }
- },
- avroCompressionLevel: {
- serializedName: "typeProperties.avroCompressionLevel",
- constraints: {
- InclusiveMaximum: 9,
- InclusiveMinimum: 1
- },
- type: {
- name: "Number"
- }
- }
- },
- additionalProperties: Dataset.type.additionalProperties
- }
-};
-
-export const AmazonS3Dataset: msRest.CompositeMapper = {
- serializedName: "AmazonS3Object",
- type: {
- name: "Composite",
- polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator,
- uberParent: "Dataset",
- className: "AmazonS3Dataset",
- modelProperties: {
- ...Dataset.type.modelProperties,
- bucketName: {
+ sheetName: {
required: true,
- serializedName: "typeProperties.bucketName",
+ serializedName: "typeProperties.sheetName",
type: {
name: "Object"
}
},
- key: {
- serializedName: "typeProperties.key",
+ range: {
+ serializedName: "typeProperties.range",
type: {
name: "Object"
}
},
- prefix: {
- serializedName: "typeProperties.prefix",
+ firstRowAsHeader: {
+ serializedName: "typeProperties.firstRowAsHeader",
+ type: {
+ name: "Object"
+ }
+ },
+ compression: {
+ serializedName: "typeProperties.compression",
+ type: {
+ name: "Composite",
+ className: "DatasetCompression",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ nullValue: {
+ serializedName: "typeProperties.nullValue",
+ type: {
+ name: "Object"
+ }
+ }
+ },
+ additionalProperties: Dataset.type.additionalProperties
+ }
+};
+
+export const AvroDataset: msRest.CompositeMapper = {
+ serializedName: "Avro",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator,
+ uberParent: "Dataset",
+ className: "AvroDataset",
+ modelProperties: {
+ ...Dataset.type.modelProperties,
+ location: {
+ required: true,
+ serializedName: "typeProperties.location",
+ type: {
+ name: "Composite",
+ className: "DatasetLocation",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ avroCompressionCodec: {
+ serializedName: "typeProperties.avroCompressionCodec",
+ type: {
+ name: "String"
+ }
+ },
+ avroCompressionLevel: {
+ serializedName: "typeProperties.avroCompressionLevel",
+ constraints: {
+ InclusiveMaximum: 9,
+ InclusiveMinimum: 1
+ },
+ type: {
+ name: "Number"
+ }
+ }
+ },
+ additionalProperties: Dataset.type.additionalProperties
+ }
+};
+
+export const AmazonS3Dataset: msRest.CompositeMapper = {
+ serializedName: "AmazonS3Object",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: Dataset.type.polymorphicDiscriminator,
+ uberParent: "Dataset",
+ className: "AmazonS3Dataset",
+ modelProperties: {
+ ...Dataset.type.modelProperties,
+ bucketName: {
+ required: true,
+ serializedName: "typeProperties.bucketName",
+ type: {
+ name: "Object"
+ }
+ },
+ key: {
+ serializedName: "typeProperties.key",
+ type: {
+ name: "Object"
+ }
+ },
+ prefix: {
+ serializedName: "typeProperties.prefix",
type: {
name: "Object"
}
@@ -13267,45 +13569,63 @@ export const AzureMLBatchExecutionActivity: msRest.CompositeMapper = {
}
};
-export const GetMetadataActivity: msRest.CompositeMapper = {
- serializedName: "GetMetadata",
+export const CompressionReadSettings: msRest.CompositeMapper = {
+ serializedName: "CompressionReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: Activity.type.polymorphicDiscriminator,
- uberParent: "Activity",
- className: "GetMetadataActivity",
+ polymorphicDiscriminator: {
+ serializedName: "type",
+ clientName: "type"
+ },
+ uberParent: "CompressionReadSettings",
+ className: "CompressionReadSettings",
modelProperties: {
- ...ExecutionActivity.type.modelProperties,
- dataset: {
+ type: {
required: true,
- serializedName: "typeProperties.dataset",
- defaultValue: {},
+ serializedName: "type",
type: {
- name: "Composite",
- className: "DatasetReference"
+ name: "String"
}
- },
- fieldList: {
- serializedName: "typeProperties.fieldList",
+ }
+ },
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+};
+
+export const ZipDeflateReadSettings: msRest.CompositeMapper = {
+ serializedName: "ZipDeflateReadSettings",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CompressionReadSettings.type.polymorphicDiscriminator,
+ uberParent: "CompressionReadSettings",
+ className: "ZipDeflateReadSettings",
+ modelProperties: {
+ ...CompressionReadSettings.type.modelProperties,
+ preserveZipFileNameAsFolder: {
+ serializedName: "preserveZipFileNameAsFolder",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Object"
- }
- }
+ name: "Object"
}
}
},
- additionalProperties: Activity.type.additionalProperties
+ additionalProperties: CompressionReadSettings.type.additionalProperties
}
};
-export const WebActivityAuthentication: msRest.CompositeMapper = {
- serializedName: "WebActivityAuthentication",
+export const FormatReadSettings: msRest.CompositeMapper = {
+ serializedName: "FormatReadSettings",
type: {
name: "Composite",
- className: "WebActivityAuthentication",
+ polymorphicDiscriminator: {
+ serializedName: "type",
+ clientName: "type"
+ },
+ uberParent: "FormatReadSettings",
+ className: "FormatReadSettings",
modelProperties: {
type: {
required: true,
@@ -13313,138 +13633,179 @@ export const WebActivityAuthentication: msRest.CompositeMapper = {
type: {
name: "String"
}
- },
- pfx: {
- serializedName: "pfx",
- type: {
- name: "Composite",
- className: "SecretBase"
- }
- },
- username: {
- serializedName: "username",
- type: {
- name: "String"
- }
- },
- password: {
- serializedName: "password",
- type: {
- name: "Composite",
- className: "SecretBase"
- }
- },
- resource: {
- serializedName: "resource",
- type: {
- name: "String"
- }
+ }
+ },
+ additionalProperties: {
+ type: {
+ name: "Object"
}
}
}
};
-export const WebActivity: msRest.CompositeMapper = {
- serializedName: "WebActivity",
+export const BinaryReadSettings: msRest.CompositeMapper = {
+ serializedName: "BinaryReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: Activity.type.polymorphicDiscriminator,
- uberParent: "Activity",
- className: "WebActivity",
+ polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator,
+ uberParent: "FormatReadSettings",
+ className: "BinaryReadSettings",
modelProperties: {
- ...ExecutionActivity.type.modelProperties,
- method: {
- required: true,
- serializedName: "typeProperties.method",
+ ...FormatReadSettings.type.modelProperties,
+ compressionProperties: {
+ serializedName: "compressionProperties",
type: {
- name: "String"
+ name: "Composite",
+ className: "CompressionReadSettings",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
}
- },
- url: {
- required: true,
- serializedName: "typeProperties.url",
+ }
+ },
+ additionalProperties: FormatReadSettings.type.additionalProperties
+ }
+};
+
+export const XmlReadSettings: msRest.CompositeMapper = {
+ serializedName: "XmlReadSettings",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator,
+ uberParent: "FormatReadSettings",
+ className: "XmlReadSettings",
+ modelProperties: {
+ ...FormatReadSettings.type.modelProperties,
+ compressionProperties: {
+ serializedName: "compressionProperties",
type: {
- name: "Object"
+ name: "Composite",
+ className: "CompressionReadSettings",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
}
},
- headers: {
- serializedName: "typeProperties.headers",
+ validationMode: {
+ serializedName: "validationMode",
type: {
name: "Object"
}
},
- body: {
- serializedName: "typeProperties.body",
+ namespacePrefixes: {
+ serializedName: "namespacePrefixes",
type: {
name: "Object"
}
- },
- authentication: {
- serializedName: "typeProperties.authentication",
+ }
+ },
+ additionalProperties: FormatReadSettings.type.additionalProperties
+ }
+};
+
+export const JsonReadSettings: msRest.CompositeMapper = {
+ serializedName: "JsonReadSettings",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator,
+ uberParent: "FormatReadSettings",
+ className: "JsonReadSettings",
+ modelProperties: {
+ ...FormatReadSettings.type.modelProperties,
+ compressionProperties: {
+ serializedName: "compressionProperties",
type: {
name: "Composite",
- className: "WebActivityAuthentication"
- }
- },
- datasets: {
- serializedName: "typeProperties.datasets",
- type: {
- name: "Sequence",
- element: {
+ className: "CompressionReadSettings",
+ additionalProperties: {
type: {
- name: "Composite",
- className: "DatasetReference"
+ name: "Object"
}
}
}
- },
- linkedServices: {
- serializedName: "typeProperties.linkedServices",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "LinkedServiceReference"
- }
- }
+ }
+ },
+ additionalProperties: FormatReadSettings.type.additionalProperties
+ }
+};
+
+export const DelimitedTextReadSettings: msRest.CompositeMapper = {
+ serializedName: "DelimitedTextReadSettings",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator,
+ uberParent: "FormatReadSettings",
+ className: "DelimitedTextReadSettings",
+ modelProperties: {
+ ...FormatReadSettings.type.modelProperties,
+ skipLineCount: {
+ serializedName: "skipLineCount",
+ type: {
+ name: "Object"
}
},
- connectVia: {
- serializedName: "typeProperties.connectVia",
+ compressionProperties: {
+ serializedName: "compressionProperties",
type: {
name: "Composite",
- className: "IntegrationRuntimeReference"
+ className: "CompressionReadSettings",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
}
}
},
- additionalProperties: Activity.type.additionalProperties
+ additionalProperties: FormatReadSettings.type.additionalProperties
}
};
-export const CopySource: msRest.CompositeMapper = {
- serializedName: "CopySource",
+export const DistcpSettings: msRest.CompositeMapper = {
+ serializedName: "DistcpSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: {
- serializedName: "type",
- clientName: "type"
- },
- uberParent: "CopySource",
- className: "CopySource",
+ className: "DistcpSettings",
modelProperties: {
- sourceRetryCount: {
- serializedName: "sourceRetryCount",
+ resourceManagerEndpoint: {
+ required: true,
+ serializedName: "resourceManagerEndpoint",
type: {
name: "Object"
}
},
- sourceRetryWait: {
- serializedName: "sourceRetryWait",
+ tempScriptPath: {
+ required: true,
+ serializedName: "tempScriptPath",
type: {
name: "Object"
}
},
+ distcpOptions: {
+ serializedName: "distcpOptions",
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ }
+};
+
+export const StoreReadSettings: msRest.CompositeMapper = {
+ serializedName: "StoreReadSettings",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: {
+ serializedName: "type",
+ clientName: "type"
+ },
+ uberParent: "StoreReadSettings",
+ className: "StoreReadSettings",
+ modelProperties: {
maxConcurrentConnections: {
serializedName: "maxConcurrentConnections",
type: {
@@ -13467,877 +13828,938 @@ export const CopySource: msRest.CompositeMapper = {
}
};
-export const HttpSource: msRest.CompositeMapper = {
- serializedName: "HttpSource",
+export const HdfsReadSettings: msRest.CompositeMapper = {
+ serializedName: "HdfsReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "HttpSource",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "HdfsReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
- httpRequestTimeout: {
- serializedName: "httpRequestTimeout",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const AzureBlobFSSource: msRest.CompositeMapper = {
- serializedName: "AzureBlobFSSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "AzureBlobFSSource",
- modelProperties: {
- ...CopySource.type.modelProperties,
- treatEmptyAsNull: {
- serializedName: "treatEmptyAsNull",
+ },
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
name: "Object"
}
},
- skipHeaderLineCount: {
- serializedName: "skipHeaderLineCount",
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
type: {
name: "Object"
}
},
- recursive: {
- serializedName: "recursive",
+ fileListPath: {
+ serializedName: "fileListPath",
type: {
name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const AzureDataLakeStoreSource: msRest.CompositeMapper = {
- serializedName: "AzureDataLakeStoreSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "AzureDataLakeStoreSource",
- modelProperties: {
- ...CopySource.type.modelProperties,
- recursive: {
- serializedName: "recursive",
+ },
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
+ type: {
+ name: "Boolean"
+ }
+ },
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
type: {
name: "Object"
}
+ },
+ distcpSettings: {
+ serializedName: "distcpSettings",
+ type: {
+ name: "Composite",
+ className: "DistcpSettings"
+ }
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const Office365Source: msRest.CompositeMapper = {
- serializedName: "Office365Source",
+export const HttpReadSettings: msRest.CompositeMapper = {
+ serializedName: "HttpReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "Office365Source",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "HttpReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
- allowedGroups: {
- serializedName: "allowedGroups",
+ ...StoreReadSettings.type.modelProperties,
+ requestMethod: {
+ serializedName: "requestMethod",
type: {
name: "Object"
}
},
- userScopeFilterUri: {
- serializedName: "userScopeFilterUri",
+ requestBody: {
+ serializedName: "requestBody",
type: {
name: "Object"
}
},
- dateFilterColumn: {
- serializedName: "dateFilterColumn",
+ additionalHeaders: {
+ serializedName: "additionalHeaders",
type: {
name: "Object"
}
},
- startTime: {
- serializedName: "startTime",
+ requestTimeout: {
+ serializedName: "requestTimeout",
type: {
name: "Object"
}
},
- endTime: {
- serializedName: "endTime",
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
type: {
- name: "Object"
+ name: "Boolean"
}
},
- outputColumns: {
- serializedName: "outputColumns",
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
type: {
name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const AdditionalColumns: msRest.CompositeMapper = {
- serializedName: "AdditionalColumns",
+export const SftpReadSettings: msRest.CompositeMapper = {
+ serializedName: "SftpReadSettings",
type: {
name: "Composite",
- className: "AdditionalColumns",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "SftpReadSettings",
modelProperties: {
- name: {
- serializedName: "name",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
},
- value: {
- serializedName: "value",
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
name: "Object"
}
- }
- }
- }
-};
-
-export const MongoDbCursorMethodsProperties: msRest.CompositeMapper = {
- serializedName: "MongoDbCursorMethodsProperties",
- type: {
- name: "Composite",
- className: "MongoDbCursorMethodsProperties",
- modelProperties: {
- project: {
- serializedName: "project",
+ },
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
type: {
name: "Object"
}
},
- sort: {
- serializedName: "sort",
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
+ type: {
+ name: "Boolean"
+ }
+ },
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
type: {
name: "Object"
}
},
- skip: {
- serializedName: "skip",
+ fileListPath: {
+ serializedName: "fileListPath",
type: {
name: "Object"
}
},
- limit: {
- serializedName: "limit",
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
type: {
name: "Object"
}
}
- },
- additionalProperties: {
- type: {
- name: "Object"
- }
- }
+ },
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const CosmosDbMongoDbApiSource: msRest.CompositeMapper = {
- serializedName: "CosmosDbMongoDbApiSource",
+export const FtpReadSettings: msRest.CompositeMapper = {
+ serializedName: "FtpReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "CosmosDbMongoDbApiSource",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "FtpReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
- filter: {
- serializedName: "filter",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
},
- cursorMethods: {
- serializedName: "cursorMethods",
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
- name: "Composite",
- className: "MongoDbCursorMethodsProperties",
- additionalProperties: {
- type: {
- name: "Object"
- }
- }
+ name: "Object"
}
},
- batchSize: {
- serializedName: "batchSize",
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
type: {
name: "Object"
}
},
- queryTimeout: {
- serializedName: "queryTimeout",
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
+ type: {
+ name: "Boolean"
+ }
+ },
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ fileListPath: {
+ serializedName: "fileListPath",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
+ }
+ },
+ useBinaryTransfer: {
+ serializedName: "useBinaryTransfer",
+ type: {
+ name: "Boolean"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const MongoDbV2Source: msRest.CompositeMapper = {
- serializedName: "MongoDbV2Source",
+export const GoogleCloudStorageReadSettings: msRest.CompositeMapper = {
+ serializedName: "GoogleCloudStorageReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "MongoDbV2Source",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "GoogleCloudStorageReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
- filter: {
- serializedName: "filter",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
},
- cursorMethods: {
- serializedName: "cursorMethods",
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
- name: "Composite",
- className: "MongoDbCursorMethodsProperties",
- additionalProperties: {
- type: {
- name: "Object"
- }
- }
+ name: "Object"
}
},
- batchSize: {
- serializedName: "batchSize",
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
type: {
name: "Object"
}
},
- queryTimeout: {
- serializedName: "queryTimeout",
+ prefix: {
+ serializedName: "prefix",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ fileListPath: {
+ serializedName: "fileListPath",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const MongoDbSource: msRest.CompositeMapper = {
- serializedName: "MongoDbSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "MongoDbSource",
- modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- serializedName: "query",
+ },
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
+ type: {
+ name: "Boolean"
+ }
+ },
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const WebSource: msRest.CompositeMapper = {
- serializedName: "WebSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "WebSource",
- modelProperties: {
- ...CopySource.type.modelProperties,
- additionalColumns: {
- serializedName: "additionalColumns",
+ },
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const OraclePartitionSettings: msRest.CompositeMapper = {
- serializedName: "OraclePartitionSettings",
+export const AzureFileStorageReadSettings: msRest.CompositeMapper = {
+ serializedName: "AzureFileStorageReadSettings",
type: {
name: "Composite",
- className: "OraclePartitionSettings",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "AzureFileStorageReadSettings",
modelProperties: {
- partitionNames: {
- serializedName: "partitionNames",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
},
- partitionColumnName: {
- serializedName: "partitionColumnName",
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
name: "Object"
}
},
- partitionUpperBound: {
- serializedName: "partitionUpperBound",
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
type: {
name: "Object"
}
},
- partitionLowerBound: {
- serializedName: "partitionLowerBound",
+ prefix: {
+ serializedName: "prefix",
type: {
name: "Object"
}
- }
- }
- }
-};
-
-export const OracleSource: msRest.CompositeMapper = {
- serializedName: "OracleSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "OracleSource",
- modelProperties: {
- ...CopySource.type.modelProperties,
- oracleReaderQuery: {
- serializedName: "oracleReaderQuery",
+ },
+ fileListPath: {
+ serializedName: "fileListPath",
type: {
name: "Object"
}
},
- queryTimeout: {
- serializedName: "queryTimeout",
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
type: {
- name: "Object"
+ name: "Boolean"
}
},
- partitionOption: {
- serializedName: "partitionOption",
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
type: {
- name: "String"
+ name: "Object"
}
},
- partitionSettings: {
- serializedName: "partitionSettings",
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
type: {
- name: "Composite",
- className: "OraclePartitionSettings"
+ name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const AzureDataExplorerSource: msRest.CompositeMapper = {
- serializedName: "AzureDataExplorerSource",
+export const FileServerReadSettings: msRest.CompositeMapper = {
+ serializedName: "FileServerReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "AzureDataExplorerSource",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "FileServerReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- required: true,
- serializedName: "query",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
},
- noTruncation: {
- serializedName: "noTruncation",
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
name: "Object"
}
},
- queryTimeout: {
- serializedName: "queryTimeout",
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ fileListPath: {
+ serializedName: "fileListPath",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const DistcpSettings: msRest.CompositeMapper = {
- serializedName: "DistcpSettings",
- type: {
- name: "Composite",
- className: "DistcpSettings",
- modelProperties: {
- resourceManagerEndpoint: {
- required: true,
- serializedName: "resourceManagerEndpoint",
+ },
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
type: {
- name: "Object"
+ name: "Boolean"
}
},
- tempScriptPath: {
- required: true,
- serializedName: "tempScriptPath",
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
type: {
name: "Object"
}
},
- distcpOptions: {
- serializedName: "distcpOptions",
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
type: {
name: "Object"
}
- }
- }
- }
-};
-
-export const HdfsSource: msRest.CompositeMapper = {
- serializedName: "HdfsSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "HdfsSource",
- modelProperties: {
- ...CopySource.type.modelProperties,
- recursive: {
- serializedName: "recursive",
+ },
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
type: {
name: "Object"
}
},
- distcpSettings: {
- serializedName: "distcpSettings",
+ fileFilter: {
+ serializedName: "fileFilter",
type: {
- name: "Composite",
- className: "DistcpSettings"
+ name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const FileSystemSource: msRest.CompositeMapper = {
- serializedName: "FileSystemSource",
+export const AmazonS3ReadSettings: msRest.CompositeMapper = {
+ serializedName: "AmazonS3ReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "FileSystemSource",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "AmazonS3ReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
+ ...StoreReadSettings.type.modelProperties,
recursive: {
serializedName: "recursive",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const RestSource: msRest.CompositeMapper = {
- serializedName: "RestSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "RestSource",
- modelProperties: {
- ...CopySource.type.modelProperties,
- requestMethod: {
- serializedName: "requestMethod",
+ },
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
type: {
name: "Object"
}
},
- requestBody: {
- serializedName: "requestBody",
+ prefix: {
+ serializedName: "prefix",
type: {
name: "Object"
}
},
- additionalHeaders: {
- serializedName: "additionalHeaders",
+ fileListPath: {
+ serializedName: "fileListPath",
type: {
name: "Object"
}
},
- paginationRules: {
- serializedName: "paginationRules",
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
type: {
- name: "Object"
+ name: "Boolean"
}
},
- httpRequestTimeout: {
- serializedName: "httpRequestTimeout",
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
type: {
name: "Object"
}
},
- requestInterval: {
- serializedName: "requestInterval",
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const SalesforceServiceCloudSource: msRest.CompositeMapper = {
- serializedName: "SalesforceServiceCloudSource",
+export const AzureDataLakeStoreReadSettings: msRest.CompositeMapper = {
+ serializedName: "AzureDataLakeStoreReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "SalesforceServiceCloudSource",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "AzureDataLakeStoreReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
},
- readBehavior: {
- serializedName: "readBehavior",
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
- name: "String"
+ name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
+ }
+ },
+ fileListPath: {
+ serializedName: "fileListPath",
+ type: {
+ name: "Object"
+ }
+ },
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
+ type: {
+ name: "Boolean"
+ }
+ },
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
+ type: {
+ name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const ODataSource: msRest.CompositeMapper = {
- serializedName: "ODataSource",
+export const AzureBlobFSReadSettings: msRest.CompositeMapper = {
+ serializedName: "AzureBlobFSReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "ODataSource",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "AzureBlobFSReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
+ type: {
+ name: "Object"
+ }
+ },
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
+ type: {
+ name: "Object"
+ }
+ },
+ fileListPath: {
+ serializedName: "fileListPath",
+ type: {
+ name: "Object"
+ }
+ },
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
+ type: {
+ name: "Boolean"
+ }
+ },
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
+ type: {
+ name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const MicrosoftAccessSource: msRest.CompositeMapper = {
- serializedName: "MicrosoftAccessSource",
+export const AzureBlobStorageReadSettings: msRest.CompositeMapper = {
+ serializedName: "AzureBlobStorageReadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "MicrosoftAccessSource",
+ polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
+ uberParent: "StoreReadSettings",
+ className: "AzureBlobStorageReadSettings",
modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...StoreReadSettings.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ wildcardFolderPath: {
+ serializedName: "wildcardFolderPath",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
+ }
+ },
+ wildcardFileName: {
+ serializedName: "wildcardFileName",
+ type: {
+ name: "Object"
+ }
+ },
+ prefix: {
+ serializedName: "prefix",
+ type: {
+ name: "Object"
+ }
+ },
+ fileListPath: {
+ serializedName: "fileListPath",
+ type: {
+ name: "Object"
+ }
+ },
+ enablePartitionDiscovery: {
+ serializedName: "enablePartitionDiscovery",
+ type: {
+ name: "Boolean"
+ }
+ },
+ partitionRootPath: {
+ serializedName: "partitionRootPath",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeStart: {
+ serializedName: "modifiedDatetimeStart",
+ type: {
+ name: "Object"
+ }
+ },
+ modifiedDatetimeEnd: {
+ serializedName: "modifiedDatetimeEnd",
+ type: {
+ name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: StoreReadSettings.type.additionalProperties
}
};
-export const RelationalSource: msRest.CompositeMapper = {
- serializedName: "RelationalSource",
+export const GetMetadataActivity: msRest.CompositeMapper = {
+ serializedName: "GetMetadata",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "RelationalSource",
+ polymorphicDiscriminator: Activity.type.polymorphicDiscriminator,
+ uberParent: "Activity",
+ className: "GetMetadataActivity",
modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...ExecutionActivity.type.modelProperties,
+ dataset: {
+ required: true,
+ serializedName: "typeProperties.dataset",
+ defaultValue: {},
type: {
- name: "Object"
+ name: "Composite",
+ className: "DatasetReference"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ fieldList: {
+ serializedName: "typeProperties.fieldList",
type: {
name: "Sequence",
element: {
type: {
- name: "Composite",
- className: "AdditionalColumns"
+ name: "Object"
+ }
+ }
+ }
+ },
+ storeSettings: {
+ serializedName: "typeProperties.storeSettings",
+ type: {
+ name: "Composite",
+ className: "StoreReadSettings",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ formatSettings: {
+ serializedName: "typeProperties.formatSettings",
+ type: {
+ name: "Composite",
+ className: "FormatReadSettings",
+ additionalProperties: {
+ type: {
+ name: "Object"
}
}
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: Activity.type.additionalProperties
}
};
-export const CommonDataServiceForAppsSource: msRest.CompositeMapper = {
- serializedName: "CommonDataServiceForAppsSource",
+export const WebActivityAuthentication: msRest.CompositeMapper = {
+ serializedName: "WebActivityAuthentication",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "CommonDataServiceForAppsSource",
+ className: "WebActivityAuthentication",
modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- serializedName: "query",
+ type: {
+ required: true,
+ serializedName: "type",
type: {
- name: "Object"
+ name: "String"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ pfx: {
+ serializedName: "pfx",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Composite",
+ className: "SecretBase"
+ }
+ },
+ username: {
+ serializedName: "username",
+ type: {
+ name: "String"
+ }
+ },
+ password: {
+ serializedName: "password",
+ type: {
+ name: "Composite",
+ className: "SecretBase"
+ }
+ },
+ resource: {
+ serializedName: "resource",
+ type: {
+ name: "String"
}
}
- },
- additionalProperties: CopySource.type.additionalProperties
+ }
}
};
-export const DynamicsCrmSource: msRest.CompositeMapper = {
- serializedName: "DynamicsCrmSource",
+export const WebActivity: msRest.CompositeMapper = {
+ serializedName: "WebActivity",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "DynamicsCrmSource",
+ polymorphicDiscriminator: Activity.type.polymorphicDiscriminator,
+ uberParent: "Activity",
+ className: "WebActivity",
modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...ExecutionActivity.type.modelProperties,
+ method: {
+ required: true,
+ serializedName: "typeProperties.method",
+ type: {
+ name: "String"
+ }
+ },
+ url: {
+ required: true,
+ serializedName: "typeProperties.url",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ headers: {
+ serializedName: "typeProperties.headers",
+ type: {
+ name: "Object"
+ }
+ },
+ body: {
+ serializedName: "typeProperties.body",
+ type: {
+ name: "Object"
+ }
+ },
+ authentication: {
+ serializedName: "typeProperties.authentication",
+ type: {
+ name: "Composite",
+ className: "WebActivityAuthentication"
+ }
+ },
+ datasets: {
+ serializedName: "typeProperties.datasets",
type: {
name: "Sequence",
element: {
type: {
name: "Composite",
- className: "AdditionalColumns"
+ className: "DatasetReference"
+ }
+ }
+ }
+ },
+ linkedServices: {
+ serializedName: "typeProperties.linkedServices",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "LinkedServiceReference"
}
}
}
+ },
+ connectVia: {
+ serializedName: "typeProperties.connectVia",
+ type: {
+ name: "Composite",
+ className: "IntegrationRuntimeReference"
+ }
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: Activity.type.additionalProperties
}
};
-export const DynamicsSource: msRest.CompositeMapper = {
- serializedName: "DynamicsSource",
+export const CopySource: msRest.CompositeMapper = {
+ serializedName: "CopySource",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ polymorphicDiscriminator: {
+ serializedName: "type",
+ clientName: "type"
+ },
uberParent: "CopySource",
- className: "DynamicsSource",
+ className: "CopySource",
modelProperties: {
- ...CopySource.type.modelProperties,
- query: {
- serializedName: "query",
+ sourceRetryCount: {
+ serializedName: "sourceRetryCount",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ sourceRetryWait: {
+ serializedName: "sourceRetryWait",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
+ }
+ },
+ maxConcurrentConnections: {
+ serializedName: "maxConcurrentConnections",
+ type: {
+ name: "Object"
+ }
+ },
+ type: {
+ required: true,
+ serializedName: "type",
+ type: {
+ name: "String"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
}
};
-export const CosmosDbSqlApiSource: msRest.CompositeMapper = {
- serializedName: "CosmosDbSqlApiSource",
+export const SharePointOnlineListSource: msRest.CompositeMapper = {
+ serializedName: "SharePointOnlineListSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "CosmosDbSqlApiSource",
+ className: "SharePointOnlineListSource",
modelProperties: {
...CopySource.type.modelProperties,
query: {
@@ -14346,42 +14768,87 @@ export const CosmosDbSqlApiSource: msRest.CompositeMapper = {
name: "Object"
}
},
- pageSize: {
- serializedName: "pageSize",
+ httpRequestTimeout: {
+ serializedName: "httpRequestTimeout",
type: {
name: "Object"
}
- },
- preferredRegions: {
- serializedName: "preferredRegions",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const ExportSettings: msRest.CompositeMapper = {
+ serializedName: "ExportSettings",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: {
+ serializedName: "type",
+ clientName: "type"
+ },
+ uberParent: "ExportSettings",
+ className: "ExportSettings",
+ modelProperties: {
+ type: {
+ required: true,
+ serializedName: "type",
type: {
- name: "Object"
+ name: "String"
+ }
+ }
+ },
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+};
+
+export const SnowflakeExportCopyCommand: msRest.CompositeMapper = {
+ serializedName: "SnowflakeExportCopyCommand",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: ExportSettings.type.polymorphicDiscriminator,
+ uberParent: "ExportSettings",
+ className: "SnowflakeExportCopyCommand",
+ modelProperties: {
+ ...ExportSettings.type.modelProperties,
+ additionalCopyOptions: {
+ serializedName: "additionalCopyOptions",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "Object"
+ }
+ }
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ additionalFormatOptions: {
+ serializedName: "additionalFormatOptions",
type: {
- name: "Sequence",
- element: {
+ name: "Dictionary",
+ value: {
type: {
- name: "Composite",
- className: "AdditionalColumns"
+ name: "Object"
}
}
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: ExportSettings.type.additionalProperties
}
};
-export const DocumentDbCollectionSource: msRest.CompositeMapper = {
- serializedName: "DocumentDbCollectionSource",
+export const SnowflakeSource: msRest.CompositeMapper = {
+ serializedName: "SnowflakeSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "DocumentDbCollectionSource",
+ className: "SnowflakeSource",
modelProperties: {
...CopySource.type.modelProperties,
query: {
@@ -14390,42 +14857,46 @@ export const DocumentDbCollectionSource: msRest.CompositeMapper = {
name: "Object"
}
},
- nestingSeparator: {
- serializedName: "nestingSeparator",
+ exportSettings: {
+ serializedName: "exportSettings",
type: {
- name: "Object"
+ name: "Composite",
+ className: "SnowflakeExportCopyCommand",
+ additionalProperties: ExportSettings.type.additionalProperties
}
- },
- queryTimeout: {
- serializedName: "queryTimeout",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const HttpSource: msRest.CompositeMapper = {
+ serializedName: "HttpSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "HttpSource",
+ modelProperties: {
+ ...CopySource.type.modelProperties,
+ httpRequestTimeout: {
+ serializedName: "httpRequestTimeout",
type: {
name: "Object"
}
- },
- additionalColumns: {
- serializedName: "additionalColumns",
- type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
- }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const BlobSource: msRest.CompositeMapper = {
- serializedName: "BlobSource",
+export const AzureBlobFSSource: msRest.CompositeMapper = {
+ serializedName: "AzureBlobFSSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "BlobSource",
+ className: "AzureBlobFSSource",
modelProperties: {
...CopySource.type.modelProperties,
treatEmptyAsNull: {
@@ -14451,84 +14922,69 @@ export const BlobSource: msRest.CompositeMapper = {
}
};
-export const RedshiftUnloadSettings: msRest.CompositeMapper = {
- serializedName: "RedshiftUnloadSettings",
+export const AzureDataLakeStoreSource: msRest.CompositeMapper = {
+ serializedName: "AzureDataLakeStoreSource",
type: {
name: "Composite",
- className: "RedshiftUnloadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "AzureDataLakeStoreSource",
modelProperties: {
- s3LinkedServiceName: {
- required: true,
- serializedName: "s3LinkedServiceName",
- defaultValue: {},
- type: {
- name: "Composite",
- className: "LinkedServiceReference"
- }
- },
- bucketName: {
- required: true,
- serializedName: "bucketName",
+ ...CopySource.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
}
- }
+ },
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const TabularSource: msRest.CompositeMapper = {
- serializedName: "TabularSource",
+export const Office365Source: msRest.CompositeMapper = {
+ serializedName: "Office365Source",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "TabularSource",
+ className: "Office365Source",
modelProperties: {
...CopySource.type.modelProperties,
- queryTimeout: {
- serializedName: "queryTimeout",
+ allowedGroups: {
+ serializedName: "allowedGroups",
type: {
name: "Object"
}
},
- additionalColumns: {
- serializedName: "additionalColumns",
+ userScopeFilterUri: {
+ serializedName: "userScopeFilterUri",
type: {
- name: "Sequence",
- element: {
- type: {
- name: "Composite",
- className: "AdditionalColumns"
- }
- }
+ name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const AmazonRedshiftSource: msRest.CompositeMapper = {
- serializedName: "AmazonRedshiftSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "AmazonRedshiftSource",
- modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ },
+ dateFilterColumn: {
+ serializedName: "dateFilterColumn",
type: {
name: "Object"
}
},
- redshiftUnloadSettings: {
- serializedName: "redshiftUnloadSettings",
+ startTime: {
+ serializedName: "startTime",
type: {
- name: "Composite",
- className: "RedshiftUnloadSettings"
+ name: "Object"
+ }
+ },
+ endTime: {
+ serializedName: "endTime",
+ type: {
+ name: "Object"
+ }
+ },
+ outputColumns: {
+ serializedName: "outputColumns",
+ type: {
+ name: "Object"
}
}
},
@@ -14536,119 +14992,230 @@ export const AmazonRedshiftSource: msRest.CompositeMapper = {
}
};
-export const GoogleAdWordsSource: msRest.CompositeMapper = {
- serializedName: "GoogleAdWordsSource",
+export const AdditionalColumns: msRest.CompositeMapper = {
+ serializedName: "AdditionalColumns",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "GoogleAdWordsSource",
+ className: "AdditionalColumns",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ name: {
+ serializedName: "name",
+ type: {
+ name: "Object"
+ }
+ },
+ value: {
+ serializedName: "value",
type: {
name: "Object"
}
}
- },
- additionalProperties: CopySource.type.additionalProperties
+ }
}
};
-export const OracleServiceCloudSource: msRest.CompositeMapper = {
- serializedName: "OracleServiceCloudSource",
+export const MongoDbCursorMethodsProperties: msRest.CompositeMapper = {
+ serializedName: "MongoDbCursorMethodsProperties",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "OracleServiceCloudSource",
+ className: "MongoDbCursorMethodsProperties",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ project: {
+ serializedName: "project",
+ type: {
+ name: "Object"
+ }
+ },
+ sort: {
+ serializedName: "sort",
+ type: {
+ name: "Object"
+ }
+ },
+ skip: {
+ serializedName: "skip",
+ type: {
+ name: "Object"
+ }
+ },
+ limit: {
+ serializedName: "limit",
type: {
name: "Object"
}
}
},
- additionalProperties: CopySource.type.additionalProperties
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
}
};
-export const DynamicsAXSource: msRest.CompositeMapper = {
- serializedName: "DynamicsAXSource",
+export const CosmosDbMongoDbApiSource: msRest.CompositeMapper = {
+ serializedName: "CosmosDbMongoDbApiSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "DynamicsAXSource",
+ className: "CosmosDbMongoDbApiSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ filter: {
+ serializedName: "filter",
+ type: {
+ name: "Object"
+ }
+ },
+ cursorMethods: {
+ serializedName: "cursorMethods",
+ type: {
+ name: "Composite",
+ className: "MongoDbCursorMethodsProperties",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ batchSize: {
+ serializedName: "batchSize",
+ type: {
+ name: "Object"
+ }
+ },
+ queryTimeout: {
+ serializedName: "queryTimeout",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const ResponsysSource: msRest.CompositeMapper = {
- serializedName: "ResponsysSource",
+export const MongoDbV2Source: msRest.CompositeMapper = {
+ serializedName: "MongoDbV2Source",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "ResponsysSource",
+ className: "MongoDbV2Source",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ filter: {
+ serializedName: "filter",
+ type: {
+ name: "Object"
+ }
+ },
+ cursorMethods: {
+ serializedName: "cursorMethods",
+ type: {
+ name: "Composite",
+ className: "MongoDbCursorMethodsProperties",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ batchSize: {
+ serializedName: "batchSize",
+ type: {
+ name: "Object"
+ }
+ },
+ queryTimeout: {
+ serializedName: "queryTimeout",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const SalesforceMarketingCloudSource: msRest.CompositeMapper = {
- serializedName: "SalesforceMarketingCloudSource",
+export const MongoDbSource: msRest.CompositeMapper = {
+ serializedName: "MongoDbSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SalesforceMarketingCloudSource",
+ className: "MongoDbSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const VerticaSource: msRest.CompositeMapper = {
- serializedName: "VerticaSource",
+export const WebSource: msRest.CompositeMapper = {
+ serializedName: "WebSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "VerticaSource",
+ className: "WebSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ additionalColumns: {
+ serializedName: "additionalColumns",
type: {
- name: "Object"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
}
}
},
@@ -14656,12 +15223,18 @@ export const VerticaSource: msRest.CompositeMapper = {
}
};
-export const NetezzaPartitionSettings: msRest.CompositeMapper = {
- serializedName: "NetezzaPartitionSettings",
+export const OraclePartitionSettings: msRest.CompositeMapper = {
+ serializedName: "OraclePartitionSettings",
type: {
name: "Composite",
- className: "NetezzaPartitionSettings",
+ className: "OraclePartitionSettings",
modelProperties: {
+ partitionNames: {
+ serializedName: "partitionNames",
+ type: {
+ name: "Object"
+ }
+ },
partitionColumnName: {
serializedName: "partitionColumnName",
type: {
@@ -14684,17 +15257,23 @@ export const NetezzaPartitionSettings: msRest.CompositeMapper = {
}
};
-export const NetezzaSource: msRest.CompositeMapper = {
- serializedName: "NetezzaSource",
+export const OracleSource: msRest.CompositeMapper = {
+ serializedName: "OracleSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "NetezzaSource",
+ className: "OracleSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ oracleReaderQuery: {
+ serializedName: "oracleReaderQuery",
+ type: {
+ name: "Object"
+ }
+ },
+ queryTimeout: {
+ serializedName: "queryTimeout",
type: {
name: "Object"
}
@@ -14709,27 +15288,19 @@ export const NetezzaSource: msRest.CompositeMapper = {
serializedName: "partitionSettings",
type: {
name: "Composite",
- className: "NetezzaPartitionSettings"
+ className: "OraclePartitionSettings"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const ZohoSource: msRest.CompositeMapper = {
- serializedName: "ZohoSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "ZohoSource",
- modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
type: {
- name: "Object"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
}
}
},
@@ -14737,79 +15308,44 @@ export const ZohoSource: msRest.CompositeMapper = {
}
};
-export const XeroSource: msRest.CompositeMapper = {
- serializedName: "XeroSource",
+export const AzureDataExplorerSource: msRest.CompositeMapper = {
+ serializedName: "AzureDataExplorerSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "XeroSource",
+ className: "AzureDataExplorerSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
+ required: true,
serializedName: "query",
type: {
name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const SquareSource: msRest.CompositeMapper = {
- serializedName: "SquareSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "SquareSource",
- modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ },
+ noTruncation: {
+ serializedName: "noTruncation",
type: {
name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const SparkSource: msRest.CompositeMapper = {
- serializedName: "SparkSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "SparkSource",
- modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ },
+ queryTimeout: {
+ serializedName: "queryTimeout",
type: {
name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const ShopifySource: msRest.CompositeMapper = {
- serializedName: "ShopifySource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "ShopifySource",
- modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
type: {
- name: "Object"
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
}
}
},
@@ -14817,39 +15353,26 @@ export const ShopifySource: msRest.CompositeMapper = {
}
};
-export const ServiceNowSource: msRest.CompositeMapper = {
- serializedName: "ServiceNowSource",
+export const HdfsSource: msRest.CompositeMapper = {
+ serializedName: "HdfsSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "ServiceNowSource",
+ className: "HdfsSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
- }
- },
- additionalProperties: CopySource.type.additionalProperties
- }
-};
-
-export const QuickBooksSource: msRest.CompositeMapper = {
- serializedName: "QuickBooksSource",
- type: {
- name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "QuickBooksSource",
- modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ },
+ distcpSettings: {
+ serializedName: "distcpSettings",
type: {
- name: "Object"
+ name: "Composite",
+ className: "DistcpSettings"
}
}
},
@@ -14857,237 +15380,447 @@ export const QuickBooksSource: msRest.CompositeMapper = {
}
};
-export const PrestoSource: msRest.CompositeMapper = {
- serializedName: "PrestoSource",
+export const FileSystemSource: msRest.CompositeMapper = {
+ serializedName: "FileSystemSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "PrestoSource",
+ className: "FileSystemSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const PhoenixSource: msRest.CompositeMapper = {
- serializedName: "PhoenixSource",
+export const RestSource: msRest.CompositeMapper = {
+ serializedName: "RestSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "PhoenixSource",
+ className: "RestSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ requestMethod: {
+ serializedName: "requestMethod",
+ type: {
+ name: "Object"
+ }
+ },
+ requestBody: {
+ serializedName: "requestBody",
+ type: {
+ name: "Object"
+ }
+ },
+ additionalHeaders: {
+ serializedName: "additionalHeaders",
+ type: {
+ name: "Object"
+ }
+ },
+ paginationRules: {
+ serializedName: "paginationRules",
+ type: {
+ name: "Object"
+ }
+ },
+ httpRequestTimeout: {
+ serializedName: "httpRequestTimeout",
+ type: {
+ name: "Object"
+ }
+ },
+ requestInterval: {
+ serializedName: "requestInterval",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const PaypalSource: msRest.CompositeMapper = {
- serializedName: "PaypalSource",
+export const SalesforceServiceCloudSource: msRest.CompositeMapper = {
+ serializedName: "SalesforceServiceCloudSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "PaypalSource",
+ className: "SalesforceServiceCloudSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ readBehavior: {
+ serializedName: "readBehavior",
+ type: {
+ name: "String"
+ }
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const MarketoSource: msRest.CompositeMapper = {
- serializedName: "MarketoSource",
+export const ODataSource: msRest.CompositeMapper = {
+ serializedName: "ODataSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "MarketoSource",
+ className: "ODataSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ httpRequestTimeout: {
+ serializedName: "httpRequestTimeout",
+ type: {
+ name: "Object"
+ }
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const AzureMariaDBSource: msRest.CompositeMapper = {
- serializedName: "AzureMariaDBSource",
+export const MicrosoftAccessSource: msRest.CompositeMapper = {
+ serializedName: "MicrosoftAccessSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "AzureMariaDBSource",
+ className: "MicrosoftAccessSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const MariaDBSource: msRest.CompositeMapper = {
- serializedName: "MariaDBSource",
+export const RelationalSource: msRest.CompositeMapper = {
+ serializedName: "RelationalSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "MariaDBSource",
+ className: "RelationalSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const MagentoSource: msRest.CompositeMapper = {
- serializedName: "MagentoSource",
+export const CommonDataServiceForAppsSource: msRest.CompositeMapper = {
+ serializedName: "CommonDataServiceForAppsSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "MagentoSource",
+ className: "CommonDataServiceForAppsSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const JiraSource: msRest.CompositeMapper = {
- serializedName: "JiraSource",
+export const DynamicsCrmSource: msRest.CompositeMapper = {
+ serializedName: "DynamicsCrmSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "JiraSource",
+ className: "DynamicsCrmSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const ImpalaSource: msRest.CompositeMapper = {
- serializedName: "ImpalaSource",
+export const DynamicsSource: msRest.CompositeMapper = {
+ serializedName: "DynamicsSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "ImpalaSource",
+ className: "DynamicsSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const HubspotSource: msRest.CompositeMapper = {
- serializedName: "HubspotSource",
+export const CosmosDbSqlApiSource: msRest.CompositeMapper = {
+ serializedName: "CosmosDbSqlApiSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "HubspotSource",
+ className: "CosmosDbSqlApiSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ pageSize: {
+ serializedName: "pageSize",
+ type: {
+ name: "Object"
+ }
+ },
+ preferredRegions: {
+ serializedName: "preferredRegions",
+ type: {
+ name: "Object"
+ }
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const HiveSource: msRest.CompositeMapper = {
- serializedName: "HiveSource",
+export const DocumentDbCollectionSource: msRest.CompositeMapper = {
+ serializedName: "DocumentDbCollectionSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "HiveSource",
+ className: "DocumentDbCollectionSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
+ ...CopySource.type.modelProperties,
query: {
serializedName: "query",
type: {
name: "Object"
}
+ },
+ nestingSeparator: {
+ serializedName: "nestingSeparator",
+ type: {
+ name: "Object"
+ }
+ },
+ queryTimeout: {
+ serializedName: "queryTimeout",
+ type: {
+ name: "Object"
+ }
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const HBaseSource: msRest.CompositeMapper = {
- serializedName: "HBaseSource",
+export const BlobSource: msRest.CompositeMapper = {
+ serializedName: "BlobSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "HBaseSource",
+ className: "BlobSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ treatEmptyAsNull: {
+ serializedName: "treatEmptyAsNull",
+ type: {
+ name: "Object"
+ }
+ },
+ skipHeaderLineCount: {
+ serializedName: "skipHeaderLineCount",
+ type: {
+ name: "Object"
+ }
+ },
+ recursive: {
+ serializedName: "recursive",
type: {
name: "Object"
}
@@ -15097,53 +15830,71 @@ export const HBaseSource: msRest.CompositeMapper = {
}
};
-export const GreenplumSource: msRest.CompositeMapper = {
- serializedName: "GreenplumSource",
+export const RedshiftUnloadSettings: msRest.CompositeMapper = {
+ serializedName: "RedshiftUnloadSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
- uberParent: "CopySource",
- className: "GreenplumSource",
+ className: "RedshiftUnloadSettings",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ s3LinkedServiceName: {
+ required: true,
+ serializedName: "s3LinkedServiceName",
+ defaultValue: {},
+ type: {
+ name: "Composite",
+ className: "LinkedServiceReference"
+ }
+ },
+ bucketName: {
+ required: true,
+ serializedName: "bucketName",
type: {
name: "Object"
}
}
- },
- additionalProperties: CopySource.type.additionalProperties
+ }
}
};
-export const GoogleBigQuerySource: msRest.CompositeMapper = {
- serializedName: "GoogleBigQuerySource",
+export const TabularSource: msRest.CompositeMapper = {
+ serializedName: "TabularSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "GoogleBigQuerySource",
+ className: "TabularSource",
modelProperties: {
- ...TabularSource.type.modelProperties,
- query: {
- serializedName: "query",
+ ...CopySource.type.modelProperties,
+ queryTimeout: {
+ serializedName: "queryTimeout",
type: {
name: "Object"
}
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const EloquaSource: msRest.CompositeMapper = {
- serializedName: "EloquaSource",
+export const AmazonRedshiftSource: msRest.CompositeMapper = {
+ serializedName: "AmazonRedshiftSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "EloquaSource",
+ className: "AmazonRedshiftSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15151,19 +15902,26 @@ export const EloquaSource: msRest.CompositeMapper = {
type: {
name: "Object"
}
+ },
+ redshiftUnloadSettings: {
+ serializedName: "redshiftUnloadSettings",
+ type: {
+ name: "Composite",
+ className: "RedshiftUnloadSettings"
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const DrillSource: msRest.CompositeMapper = {
- serializedName: "DrillSource",
+export const GoogleAdWordsSource: msRest.CompositeMapper = {
+ serializedName: "GoogleAdWordsSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "DrillSource",
+ className: "GoogleAdWordsSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15177,13 +15935,13 @@ export const DrillSource: msRest.CompositeMapper = {
}
};
-export const CouchbaseSource: msRest.CompositeMapper = {
- serializedName: "CouchbaseSource",
+export const OracleServiceCloudSource: msRest.CompositeMapper = {
+ serializedName: "OracleServiceCloudSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "CouchbaseSource",
+ className: "OracleServiceCloudSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15197,13 +15955,13 @@ export const CouchbaseSource: msRest.CompositeMapper = {
}
};
-export const ConcurSource: msRest.CompositeMapper = {
- serializedName: "ConcurSource",
+export const DynamicsAXSource: msRest.CompositeMapper = {
+ serializedName: "DynamicsAXSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "ConcurSource",
+ className: "DynamicsAXSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15211,19 +15969,25 @@ export const ConcurSource: msRest.CompositeMapper = {
type: {
name: "Object"
}
+ },
+ httpRequestTimeout: {
+ serializedName: "httpRequestTimeout",
+ type: {
+ name: "Object"
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const AzurePostgreSqlSource: msRest.CompositeMapper = {
- serializedName: "AzurePostgreSqlSource",
+export const ResponsysSource: msRest.CompositeMapper = {
+ serializedName: "ResponsysSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "AzurePostgreSqlSource",
+ className: "ResponsysSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15237,13 +16001,13 @@ export const AzurePostgreSqlSource: msRest.CompositeMapper = {
}
};
-export const AmazonMWSSource: msRest.CompositeMapper = {
- serializedName: "AmazonMWSSource",
+export const SalesforceMarketingCloudSource: msRest.CompositeMapper = {
+ serializedName: "SalesforceMarketingCloudSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "AmazonMWSSource",
+ className: "SalesforceMarketingCloudSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15257,13 +16021,13 @@ export const AmazonMWSSource: msRest.CompositeMapper = {
}
};
-export const CassandraSource: msRest.CompositeMapper = {
- serializedName: "CassandraSource",
+export const VerticaSource: msRest.CompositeMapper = {
+ serializedName: "VerticaSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "CassandraSource",
+ className: "VerticaSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15271,23 +16035,17 @@ export const CassandraSource: msRest.CompositeMapper = {
type: {
name: "Object"
}
- },
- consistencyLevel: {
- serializedName: "consistencyLevel",
- type: {
- name: "String"
- }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const TeradataPartitionSettings: msRest.CompositeMapper = {
- serializedName: "TeradataPartitionSettings",
+export const NetezzaPartitionSettings: msRest.CompositeMapper = {
+ serializedName: "NetezzaPartitionSettings",
type: {
name: "Composite",
- className: "TeradataPartitionSettings",
+ className: "NetezzaPartitionSettings",
modelProperties: {
partitionColumnName: {
serializedName: "partitionColumnName",
@@ -15311,13 +16069,13 @@ export const TeradataPartitionSettings: msRest.CompositeMapper = {
}
};
-export const TeradataSource: msRest.CompositeMapper = {
- serializedName: "TeradataSource",
+export const NetezzaSource: msRest.CompositeMapper = {
+ serializedName: "NetezzaSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "TeradataSource",
+ className: "NetezzaSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15336,7 +16094,7 @@ export const TeradataSource: msRest.CompositeMapper = {
serializedName: "partitionSettings",
type: {
name: "Composite",
- className: "TeradataPartitionSettings"
+ className: "NetezzaPartitionSettings"
}
}
},
@@ -15344,13 +16102,13 @@ export const TeradataSource: msRest.CompositeMapper = {
}
};
-export const AzureMySqlSource: msRest.CompositeMapper = {
- serializedName: "AzureMySqlSource",
+export const ZohoSource: msRest.CompositeMapper = {
+ serializedName: "ZohoSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "AzureMySqlSource",
+ className: "ZohoSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15364,29 +16122,77 @@ export const AzureMySqlSource: msRest.CompositeMapper = {
}
};
-export const SqlDWSource: msRest.CompositeMapper = {
- serializedName: "SqlDWSource",
+export const XeroSource: msRest.CompositeMapper = {
+ serializedName: "XeroSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SqlDWSource",
+ className: "XeroSource",
modelProperties: {
...TabularSource.type.modelProperties,
- sqlReaderQuery: {
- serializedName: "sqlReaderQuery",
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
- },
- sqlReaderStoredProcedureName: {
- serializedName: "sqlReaderStoredProcedureName",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SquareSource: msRest.CompositeMapper = {
+ serializedName: "SquareSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SquareSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
- },
- storedProcedureParameters: {
- serializedName: "storedProcedureParameters",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SparkSource: msRest.CompositeMapper = {
+ serializedName: "SparkSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SparkSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
+ type: {
+ name: "Object"
+ }
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const ShopifySource: msRest.CompositeMapper = {
+ serializedName: "ShopifySource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "ShopifySource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
@@ -15396,63 +16202,37 @@ export const SqlDWSource: msRest.CompositeMapper = {
}
};
-export const StoredProcedureParameter: msRest.CompositeMapper = {
- serializedName: "StoredProcedureParameter",
+export const ServiceNowSource: msRest.CompositeMapper = {
+ serializedName: "ServiceNowSource",
type: {
name: "Composite",
- className: "StoredProcedureParameter",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "ServiceNowSource",
modelProperties: {
- value: {
- serializedName: "value",
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
- },
- type: {
- serializedName: "type",
- type: {
- name: "String"
- }
}
- }
+ },
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const SqlMISource: msRest.CompositeMapper = {
- serializedName: "SqlMISource",
+export const QuickBooksSource: msRest.CompositeMapper = {
+ serializedName: "QuickBooksSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SqlMISource",
+ className: "QuickBooksSource",
modelProperties: {
...TabularSource.type.modelProperties,
- sqlReaderQuery: {
- serializedName: "sqlReaderQuery",
- type: {
- name: "Object"
- }
- },
- sqlReaderStoredProcedureName: {
- serializedName: "sqlReaderStoredProcedureName",
- type: {
- name: "Object"
- }
- },
- storedProcedureParameters: {
- serializedName: "storedProcedureParameters",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "Composite",
- className: "StoredProcedureParameter"
- }
- }
- }
- },
- produceAdditionalTypes: {
- serializedName: "produceAdditionalTypes",
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
@@ -15462,41 +16242,17 @@ export const SqlMISource: msRest.CompositeMapper = {
}
};
-export const AzureSqlSource: msRest.CompositeMapper = {
- serializedName: "AzureSqlSource",
+export const PrestoSource: msRest.CompositeMapper = {
+ serializedName: "PrestoSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "AzureSqlSource",
+ className: "PrestoSource",
modelProperties: {
...TabularSource.type.modelProperties,
- sqlReaderQuery: {
- serializedName: "sqlReaderQuery",
- type: {
- name: "Object"
- }
- },
- sqlReaderStoredProcedureName: {
- serializedName: "sqlReaderStoredProcedureName",
- type: {
- name: "Object"
- }
- },
- storedProcedureParameters: {
- serializedName: "storedProcedureParameters",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "Composite",
- className: "StoredProcedureParameter"
- }
- }
- }
- },
- produceAdditionalTypes: {
- serializedName: "produceAdditionalTypes",
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
@@ -15506,41 +16262,17 @@ export const AzureSqlSource: msRest.CompositeMapper = {
}
};
-export const SqlServerSource: msRest.CompositeMapper = {
- serializedName: "SqlServerSource",
+export const PhoenixSource: msRest.CompositeMapper = {
+ serializedName: "PhoenixSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SqlServerSource",
+ className: "PhoenixSource",
modelProperties: {
...TabularSource.type.modelProperties,
- sqlReaderQuery: {
- serializedName: "sqlReaderQuery",
- type: {
- name: "Object"
- }
- },
- sqlReaderStoredProcedureName: {
- serializedName: "sqlReaderStoredProcedureName",
- type: {
- name: "Object"
- }
- },
- storedProcedureParameters: {
- serializedName: "storedProcedureParameters",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "Composite",
- className: "StoredProcedureParameter"
- }
- }
- }
- },
- produceAdditionalTypes: {
- serializedName: "produceAdditionalTypes",
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
@@ -15550,41 +16282,17 @@ export const SqlServerSource: msRest.CompositeMapper = {
}
};
-export const SqlSource: msRest.CompositeMapper = {
- serializedName: "SqlSource",
+export const PaypalSource: msRest.CompositeMapper = {
+ serializedName: "PaypalSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SqlSource",
+ className: "PaypalSource",
modelProperties: {
...TabularSource.type.modelProperties,
- sqlReaderQuery: {
- serializedName: "sqlReaderQuery",
- type: {
- name: "Object"
- }
- },
- sqlReaderStoredProcedureName: {
- serializedName: "sqlReaderStoredProcedureName",
- type: {
- name: "Object"
- }
- },
- storedProcedureParameters: {
- serializedName: "storedProcedureParameters",
- type: {
- name: "Dictionary",
- value: {
- type: {
- name: "Composite",
- className: "StoredProcedureParameter"
- }
- }
- }
- },
- isolationLevel: {
- serializedName: "isolationLevel",
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
@@ -15594,96 +16302,59 @@ export const SqlSource: msRest.CompositeMapper = {
}
};
-export const SapTablePartitionSettings: msRest.CompositeMapper = {
- serializedName: "SapTablePartitionSettings",
+export const MarketoSource: msRest.CompositeMapper = {
+ serializedName: "MarketoSource",
type: {
name: "Composite",
- className: "SapTablePartitionSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "MarketoSource",
modelProperties: {
- partitionColumnName: {
- serializedName: "partitionColumnName",
- type: {
- name: "Object"
- }
- },
- partitionUpperBound: {
- serializedName: "partitionUpperBound",
- type: {
- name: "Object"
- }
- },
- partitionLowerBound: {
- serializedName: "partitionLowerBound",
- type: {
- name: "Object"
- }
- },
- maxPartitionsNumber: {
- serializedName: "maxPartitionsNumber",
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
}
- }
+ },
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const SapTableSource: msRest.CompositeMapper = {
- serializedName: "SapTableSource",
+export const AzureMariaDBSource: msRest.CompositeMapper = {
+ serializedName: "AzureMariaDBSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SapTableSource",
+ className: "AzureMariaDBSource",
modelProperties: {
...TabularSource.type.modelProperties,
- rowCount: {
- serializedName: "rowCount",
- type: {
- name: "Object"
- }
- },
- rowSkips: {
- serializedName: "rowSkips",
- type: {
- name: "Object"
- }
- },
- rfcTableFields: {
- serializedName: "rfcTableFields",
- type: {
- name: "Object"
- }
- },
- rfcTableOptions: {
- serializedName: "rfcTableOptions",
- type: {
- name: "Object"
- }
- },
- batchSize: {
- serializedName: "batchSize",
- type: {
- name: "Object"
- }
- },
- customRfcReadTableFunctionModule: {
- serializedName: "customRfcReadTableFunctionModule",
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
- },
- partitionOption: {
- serializedName: "partitionOption",
- type: {
- name: "String"
- }
- },
- partitionSettings: {
- serializedName: "partitionSettings",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const MariaDBSource: msRest.CompositeMapper = {
+ serializedName: "MariaDBSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "MariaDBSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
- name: "Composite",
- className: "SapTablePartitionSettings"
+ name: "Object"
}
}
},
@@ -15691,23 +16362,17 @@ export const SapTableSource: msRest.CompositeMapper = {
}
};
-export const SapOpenHubSource: msRest.CompositeMapper = {
- serializedName: "SapOpenHubSource",
+export const MagentoSource: msRest.CompositeMapper = {
+ serializedName: "MagentoSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SapOpenHubSource",
+ className: "MagentoSource",
modelProperties: {
...TabularSource.type.modelProperties,
- excludeLastRequest: {
- serializedName: "excludeLastRequest",
- type: {
- name: "Object"
- }
- },
- baseRequestId: {
- serializedName: "baseRequestId",
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
@@ -15717,29 +16382,33 @@ export const SapOpenHubSource: msRest.CompositeMapper = {
}
};
-export const SapHanaPartitionSettings: msRest.CompositeMapper = {
- serializedName: "SapHanaPartitionSettings",
+export const JiraSource: msRest.CompositeMapper = {
+ serializedName: "JiraSource",
type: {
name: "Composite",
- className: "SapHanaPartitionSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "JiraSource",
modelProperties: {
- partitionColumnName: {
- serializedName: "partitionColumnName",
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
}
- }
+ },
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const SapHanaSource: msRest.CompositeMapper = {
- serializedName: "SapHanaSource",
+export const ImpalaSource: msRest.CompositeMapper = {
+ serializedName: "ImpalaSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SapHanaSource",
+ className: "ImpalaSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15747,38 +16416,39 @@ export const SapHanaSource: msRest.CompositeMapper = {
type: {
name: "Object"
}
- },
- packetSize: {
- serializedName: "packetSize",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const HubspotSource: msRest.CompositeMapper = {
+ serializedName: "HubspotSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "HubspotSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
- },
- partitionOption: {
- serializedName: "partitionOption",
- type: {
- name: "String"
- }
- },
- partitionSettings: {
- serializedName: "partitionSettings",
- type: {
- name: "Composite",
- className: "SapHanaPartitionSettings"
- }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const SapEccSource: msRest.CompositeMapper = {
- serializedName: "SapEccSource",
+export const HiveSource: msRest.CompositeMapper = {
+ serializedName: "HiveSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SapEccSource",
+ className: "HiveSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15792,13 +16462,13 @@ export const SapEccSource: msRest.CompositeMapper = {
}
};
-export const SapCloudForCustomerSource: msRest.CompositeMapper = {
- serializedName: "SapCloudForCustomerSource",
+export const HBaseSource: msRest.CompositeMapper = {
+ serializedName: "HBaseSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SapCloudForCustomerSource",
+ className: "HBaseSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15812,13 +16482,13 @@ export const SapCloudForCustomerSource: msRest.CompositeMapper = {
}
};
-export const SalesforceSource: msRest.CompositeMapper = {
- serializedName: "SalesforceSource",
+export const GreenplumSource: msRest.CompositeMapper = {
+ serializedName: "GreenplumSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SalesforceSource",
+ className: "GreenplumSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15826,25 +16496,19 @@ export const SalesforceSource: msRest.CompositeMapper = {
type: {
name: "Object"
}
- },
- readBehavior: {
- serializedName: "readBehavior",
- type: {
- name: "String"
- }
}
},
additionalProperties: CopySource.type.additionalProperties
}
};
-export const SapBwSource: msRest.CompositeMapper = {
- serializedName: "SapBwSource",
+export const GoogleBigQuerySource: msRest.CompositeMapper = {
+ serializedName: "GoogleBigQuerySource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SapBwSource",
+ className: "GoogleBigQuerySource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15858,13 +16522,13 @@ export const SapBwSource: msRest.CompositeMapper = {
}
};
-export const SybaseSource: msRest.CompositeMapper = {
- serializedName: "SybaseSource",
+export const EloquaSource: msRest.CompositeMapper = {
+ serializedName: "EloquaSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "SybaseSource",
+ className: "EloquaSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15878,13 +16542,13 @@ export const SybaseSource: msRest.CompositeMapper = {
}
};
-export const PostgreSqlSource: msRest.CompositeMapper = {
- serializedName: "PostgreSqlSource",
+export const DrillSource: msRest.CompositeMapper = {
+ serializedName: "DrillSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "PostgreSqlSource",
+ className: "DrillSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15898,13 +16562,13 @@ export const PostgreSqlSource: msRest.CompositeMapper = {
}
};
-export const MySqlSource: msRest.CompositeMapper = {
- serializedName: "MySqlSource",
+export const CouchbaseSource: msRest.CompositeMapper = {
+ serializedName: "CouchbaseSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "MySqlSource",
+ className: "CouchbaseSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15918,13 +16582,13 @@ export const MySqlSource: msRest.CompositeMapper = {
}
};
-export const OdbcSource: msRest.CompositeMapper = {
- serializedName: "OdbcSource",
+export const ConcurSource: msRest.CompositeMapper = {
+ serializedName: "ConcurSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "OdbcSource",
+ className: "ConcurSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15938,13 +16602,13 @@ export const OdbcSource: msRest.CompositeMapper = {
}
};
-export const Db2Source: msRest.CompositeMapper = {
- serializedName: "Db2Source",
+export const AzurePostgreSqlSource: msRest.CompositeMapper = {
+ serializedName: "AzurePostgreSqlSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "Db2Source",
+ className: "AzurePostgreSqlSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15958,13 +16622,13 @@ export const Db2Source: msRest.CompositeMapper = {
}
};
-export const InformixSource: msRest.CompositeMapper = {
- serializedName: "InformixSource",
+export const AmazonMWSSource: msRest.CompositeMapper = {
+ serializedName: "AmazonMWSSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "InformixSource",
+ className: "AmazonMWSSource",
modelProperties: {
...TabularSource.type.modelProperties,
query: {
@@ -15978,25 +16642,25 @@ export const InformixSource: msRest.CompositeMapper = {
}
};
-export const AzureTableSource: msRest.CompositeMapper = {
- serializedName: "AzureTableSource",
+export const CassandraSource: msRest.CompositeMapper = {
+ serializedName: "CassandraSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "AzureTableSource",
+ className: "CassandraSource",
modelProperties: {
...TabularSource.type.modelProperties,
- azureTableSourceQuery: {
- serializedName: "azureTableSourceQuery",
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
},
- azureTableSourceIgnoreTableNotFound: {
- serializedName: "azureTableSourceIgnoreTableNotFound",
+ consistencyLevel: {
+ serializedName: "consistencyLevel",
type: {
- name: "Object"
+ name: "String"
}
}
},
@@ -16004,641 +16668,736 @@ export const AzureTableSource: msRest.CompositeMapper = {
}
};
-export const StoreReadSettings: msRest.CompositeMapper = {
- serializedName: "StoreReadSettings",
+export const TeradataPartitionSettings: msRest.CompositeMapper = {
+ serializedName: "TeradataPartitionSettings",
type: {
name: "Composite",
- polymorphicDiscriminator: {
- serializedName: "type",
- clientName: "type"
- },
- uberParent: "StoreReadSettings",
- className: "StoreReadSettings",
+ className: "TeradataPartitionSettings",
modelProperties: {
- maxConcurrentConnections: {
- serializedName: "maxConcurrentConnections",
+ partitionColumnName: {
+ serializedName: "partitionColumnName",
type: {
name: "Object"
}
},
- type: {
- required: true,
- serializedName: "type",
+ partitionUpperBound: {
+ serializedName: "partitionUpperBound",
type: {
- name: "String"
+ name: "Object"
+ }
+ },
+ partitionLowerBound: {
+ serializedName: "partitionLowerBound",
+ type: {
+ name: "Object"
}
- }
- },
- additionalProperties: {
- type: {
- name: "Object"
}
}
}
};
-export const HdfsReadSettings: msRest.CompositeMapper = {
- serializedName: "HdfsReadSettings",
+export const TeradataSource: msRest.CompositeMapper = {
+ serializedName: "TeradataSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "HdfsReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "TeradataSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
},
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
+ partitionOption: {
+ serializedName: "partitionOption",
type: {
- name: "Object"
+ name: "String"
}
},
- wildcardFileName: {
- serializedName: "wildcardFileName",
+ partitionSettings: {
+ serializedName: "partitionSettings",
+ type: {
+ name: "Composite",
+ className: "TeradataPartitionSettings"
+ }
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const AzureMySqlSource: msRest.CompositeMapper = {
+ serializedName: "AzureMySqlSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "AzureMySqlSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
+ type: {
+ name: "Object"
+ }
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SqlDWSource: msRest.CompositeMapper = {
+ serializedName: "SqlDWSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SqlDWSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ sqlReaderQuery: {
+ serializedName: "sqlReaderQuery",
type: {
name: "Object"
}
},
- fileListPath: {
- serializedName: "fileListPath",
+ sqlReaderStoredProcedureName: {
+ serializedName: "sqlReaderStoredProcedureName",
type: {
name: "Object"
}
},
- enablePartitionDiscovery: {
- serializedName: "enablePartitionDiscovery",
- type: {
- name: "Boolean"
- }
- },
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
+ storedProcedureParameters: {
+ serializedName: "storedProcedureParameters",
type: {
name: "Object"
}
- },
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const StoredProcedureParameter: msRest.CompositeMapper = {
+ serializedName: "StoredProcedureParameter",
+ type: {
+ name: "Composite",
+ className: "StoredProcedureParameter",
+ modelProperties: {
+ value: {
+ serializedName: "value",
type: {
name: "Object"
}
},
- distcpSettings: {
- serializedName: "distcpSettings",
+ type: {
+ serializedName: "type",
type: {
- name: "Composite",
- className: "DistcpSettings"
+ name: "String"
}
}
- },
- additionalProperties: StoreReadSettings.type.additionalProperties
+ }
}
};
-export const HttpReadSettings: msRest.CompositeMapper = {
- serializedName: "HttpReadSettings",
+export const SqlMISource: msRest.CompositeMapper = {
+ serializedName: "SqlMISource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "HttpReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SqlMISource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- requestMethod: {
- serializedName: "requestMethod",
+ ...TabularSource.type.modelProperties,
+ sqlReaderQuery: {
+ serializedName: "sqlReaderQuery",
type: {
name: "Object"
}
},
- requestBody: {
- serializedName: "requestBody",
+ sqlReaderStoredProcedureName: {
+ serializedName: "sqlReaderStoredProcedureName",
type: {
name: "Object"
}
},
- additionalHeaders: {
- serializedName: "additionalHeaders",
+ storedProcedureParameters: {
+ serializedName: "storedProcedureParameters",
type: {
- name: "Object"
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "Composite",
+ className: "StoredProcedureParameter"
+ }
+ }
}
},
- requestTimeout: {
- serializedName: "requestTimeout",
+ produceAdditionalTypes: {
+ serializedName: "produceAdditionalTypes",
type: {
name: "Object"
}
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const SftpReadSettings: msRest.CompositeMapper = {
- serializedName: "SftpReadSettings",
+export const AzureSqlSource: msRest.CompositeMapper = {
+ serializedName: "AzureSqlSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "SftpReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "AzureSqlSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
- type: {
- name: "Object"
- }
- },
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
- type: {
- name: "Object"
- }
- },
- wildcardFileName: {
- serializedName: "wildcardFileName",
+ ...TabularSource.type.modelProperties,
+ sqlReaderQuery: {
+ serializedName: "sqlReaderQuery",
type: {
name: "Object"
}
},
- fileListPath: {
- serializedName: "fileListPath",
+ sqlReaderStoredProcedureName: {
+ serializedName: "sqlReaderStoredProcedureName",
type: {
name: "Object"
}
},
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
+ storedProcedureParameters: {
+ serializedName: "storedProcedureParameters",
type: {
- name: "Object"
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "Composite",
+ className: "StoredProcedureParameter"
+ }
+ }
}
},
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ produceAdditionalTypes: {
+ serializedName: "produceAdditionalTypes",
type: {
name: "Object"
}
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const FtpReadSettings: msRest.CompositeMapper = {
- serializedName: "FtpReadSettings",
+export const SqlServerSource: msRest.CompositeMapper = {
+ serializedName: "SqlServerSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "FtpReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SqlServerSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
+ ...TabularSource.type.modelProperties,
+ sqlReaderQuery: {
+ serializedName: "sqlReaderQuery",
type: {
name: "Object"
}
},
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
+ sqlReaderStoredProcedureName: {
+ serializedName: "sqlReaderStoredProcedureName",
type: {
name: "Object"
}
},
- wildcardFileName: {
- serializedName: "wildcardFileName",
+ storedProcedureParameters: {
+ serializedName: "storedProcedureParameters",
type: {
- name: "Object"
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "Composite",
+ className: "StoredProcedureParameter"
+ }
+ }
}
},
- fileListPath: {
- serializedName: "fileListPath",
+ produceAdditionalTypes: {
+ serializedName: "produceAdditionalTypes",
type: {
name: "Object"
}
- },
- useBinaryTransfer: {
- serializedName: "useBinaryTransfer",
- type: {
- name: "Boolean"
- }
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const GoogleCloudStorageReadSettings: msRest.CompositeMapper = {
- serializedName: "GoogleCloudStorageReadSettings",
+export const SqlSource: msRest.CompositeMapper = {
+ serializedName: "SqlSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "GoogleCloudStorageReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SqlSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
+ ...TabularSource.type.modelProperties,
+ sqlReaderQuery: {
+ serializedName: "sqlReaderQuery",
type: {
name: "Object"
}
},
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
+ sqlReaderStoredProcedureName: {
+ serializedName: "sqlReaderStoredProcedureName",
type: {
name: "Object"
}
},
- wildcardFileName: {
- serializedName: "wildcardFileName",
+ storedProcedureParameters: {
+ serializedName: "storedProcedureParameters",
type: {
- name: "Object"
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "Composite",
+ className: "StoredProcedureParameter"
+ }
+ }
}
},
- prefix: {
- serializedName: "prefix",
+ isolationLevel: {
+ serializedName: "isolationLevel",
type: {
name: "Object"
}
- },
- fileListPath: {
- serializedName: "fileListPath",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SapTablePartitionSettings: msRest.CompositeMapper = {
+ serializedName: "SapTablePartitionSettings",
+ type: {
+ name: "Composite",
+ className: "SapTablePartitionSettings",
+ modelProperties: {
+ partitionColumnName: {
+ serializedName: "partitionColumnName",
type: {
name: "Object"
}
},
- enablePartitionDiscovery: {
- serializedName: "enablePartitionDiscovery",
+ partitionUpperBound: {
+ serializedName: "partitionUpperBound",
type: {
- name: "Boolean"
+ name: "Object"
}
},
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
+ partitionLowerBound: {
+ serializedName: "partitionLowerBound",
type: {
name: "Object"
}
},
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ maxPartitionsNumber: {
+ serializedName: "maxPartitionsNumber",
type: {
name: "Object"
}
}
- },
- additionalProperties: StoreReadSettings.type.additionalProperties
+ }
}
};
-export const AzureFileStorageReadSettings: msRest.CompositeMapper = {
- serializedName: "AzureFileStorageReadSettings",
+export const SapTableSource: msRest.CompositeMapper = {
+ serializedName: "SapTableSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "AzureFileStorageReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SapTableSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
+ ...TabularSource.type.modelProperties,
+ rowCount: {
+ serializedName: "rowCount",
type: {
name: "Object"
}
},
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
+ rowSkips: {
+ serializedName: "rowSkips",
type: {
name: "Object"
}
},
- wildcardFileName: {
- serializedName: "wildcardFileName",
+ rfcTableFields: {
+ serializedName: "rfcTableFields",
type: {
name: "Object"
}
},
- fileListPath: {
- serializedName: "fileListPath",
+ rfcTableOptions: {
+ serializedName: "rfcTableOptions",
type: {
name: "Object"
}
},
- enablePartitionDiscovery: {
- serializedName: "enablePartitionDiscovery",
+ batchSize: {
+ serializedName: "batchSize",
type: {
- name: "Boolean"
+ name: "Object"
}
},
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
+ customRfcReadTableFunctionModule: {
+ serializedName: "customRfcReadTableFunctionModule",
type: {
name: "Object"
}
},
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ partitionOption: {
+ serializedName: "partitionOption",
type: {
- name: "Object"
+ name: "String"
+ }
+ },
+ partitionSettings: {
+ serializedName: "partitionSettings",
+ type: {
+ name: "Composite",
+ className: "SapTablePartitionSettings"
}
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const FileServerReadSettings: msRest.CompositeMapper = {
- serializedName: "FileServerReadSettings",
+export const SapOpenHubSource: msRest.CompositeMapper = {
+ serializedName: "SapOpenHubSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "FileServerReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SapOpenHubSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
+ ...TabularSource.type.modelProperties,
+ excludeLastRequest: {
+ serializedName: "excludeLastRequest",
type: {
name: "Object"
}
},
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
+ baseRequestId: {
+ serializedName: "baseRequestId",
type: {
name: "Object"
}
- },
- wildcardFileName: {
- serializedName: "wildcardFileName",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SapHanaPartitionSettings: msRest.CompositeMapper = {
+ serializedName: "SapHanaPartitionSettings",
+ type: {
+ name: "Composite",
+ className: "SapHanaPartitionSettings",
+ modelProperties: {
+ partitionColumnName: {
+ serializedName: "partitionColumnName",
type: {
name: "Object"
}
- },
- fileListPath: {
- serializedName: "fileListPath",
+ }
+ }
+ }
+};
+
+export const SapHanaSource: msRest.CompositeMapper = {
+ serializedName: "SapHanaSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SapHanaSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
},
- enablePartitionDiscovery: {
- serializedName: "enablePartitionDiscovery",
+ packetSize: {
+ serializedName: "packetSize",
type: {
- name: "Boolean"
+ name: "Object"
}
},
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
+ partitionOption: {
+ serializedName: "partitionOption",
type: {
- name: "Object"
+ name: "String"
}
},
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ partitionSettings: {
+ serializedName: "partitionSettings",
type: {
- name: "Object"
+ name: "Composite",
+ className: "SapHanaPartitionSettings"
}
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const AmazonS3ReadSettings: msRest.CompositeMapper = {
- serializedName: "AmazonS3ReadSettings",
+export const SapEccSource: msRest.CompositeMapper = {
+ serializedName: "SapEccSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "AmazonS3ReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SapEccSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
},
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
+ httpRequestTimeout: {
+ serializedName: "httpRequestTimeout",
type: {
name: "Object"
}
- },
- wildcardFileName: {
- serializedName: "wildcardFileName",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SapCloudForCustomerSource: msRest.CompositeMapper = {
+ serializedName: "SapCloudForCustomerSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SapCloudForCustomerSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
},
- prefix: {
- serializedName: "prefix",
+ httpRequestTimeout: {
+ serializedName: "httpRequestTimeout",
type: {
name: "Object"
}
- },
- fileListPath: {
- serializedName: "fileListPath",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SalesforceSource: msRest.CompositeMapper = {
+ serializedName: "SalesforceSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SalesforceSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
},
- enablePartitionDiscovery: {
- serializedName: "enablePartitionDiscovery",
+ readBehavior: {
+ serializedName: "readBehavior",
type: {
- name: "Boolean"
+ name: "String"
}
- },
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SapBwSource: msRest.CompositeMapper = {
+ serializedName: "SapBwSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SapBwSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
- },
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const SybaseSource: msRest.CompositeMapper = {
+ serializedName: "SybaseSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "SybaseSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
+ type: {
+ name: "Object"
+ }
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const PostgreSqlSource: msRest.CompositeMapper = {
+ serializedName: "PostgreSqlSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "PostgreSqlSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const AzureDataLakeStoreReadSettings: msRest.CompositeMapper = {
- serializedName: "AzureDataLakeStoreReadSettings",
+export const MySqlSource: msRest.CompositeMapper = {
+ serializedName: "MySqlSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "AzureDataLakeStoreReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "MySqlSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
- type: {
- name: "Object"
- }
- },
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
- type: {
- name: "Object"
- }
- },
- wildcardFileName: {
- serializedName: "wildcardFileName",
- type: {
- name: "Object"
- }
- },
- fileListPath: {
- serializedName: "fileListPath",
- type: {
- name: "Object"
- }
- },
- enablePartitionDiscovery: {
- serializedName: "enablePartitionDiscovery",
- type: {
- name: "Boolean"
- }
- },
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
- type: {
- name: "Object"
- }
- },
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const AzureBlobFSReadSettings: msRest.CompositeMapper = {
- serializedName: "AzureBlobFSReadSettings",
+export const OdbcSource: msRest.CompositeMapper = {
+ serializedName: "OdbcSource",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "AzureBlobFSReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "OdbcSource",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
- type: {
- name: "Object"
- }
- },
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
- type: {
- name: "Object"
- }
- },
- wildcardFileName: {
- serializedName: "wildcardFileName",
- type: {
- name: "Object"
- }
- },
- fileListPath: {
- serializedName: "fileListPath",
- type: {
- name: "Object"
- }
- },
- enablePartitionDiscovery: {
- serializedName: "enablePartitionDiscovery",
- type: {
- name: "Boolean"
- }
- },
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
- type: {
- name: "Object"
- }
- },
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
-export const AzureBlobStorageReadSettings: msRest.CompositeMapper = {
- serializedName: "AzureBlobStorageReadSettings",
+export const Db2Source: msRest.CompositeMapper = {
+ serializedName: "Db2Source",
type: {
name: "Composite",
- polymorphicDiscriminator: StoreReadSettings.type.polymorphicDiscriminator,
- uberParent: "StoreReadSettings",
- className: "AzureBlobStorageReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "Db2Source",
modelProperties: {
- ...StoreReadSettings.type.modelProperties,
- recursive: {
- serializedName: "recursive",
- type: {
- name: "Object"
- }
- },
- wildcardFolderPath: {
- serializedName: "wildcardFolderPath",
- type: {
- name: "Object"
- }
- },
- wildcardFileName: {
- serializedName: "wildcardFileName",
- type: {
- name: "Object"
- }
- },
- prefix: {
- serializedName: "prefix",
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
- },
- fileListPath: {
- serializedName: "fileListPath",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const InformixSource: msRest.CompositeMapper = {
+ serializedName: "InformixSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "InformixSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ query: {
+ serializedName: "query",
type: {
name: "Object"
}
- },
- enablePartitionDiscovery: {
- serializedName: "enablePartitionDiscovery",
- type: {
- name: "Boolean"
- }
- },
- modifiedDatetimeStart: {
- serializedName: "modifiedDatetimeStart",
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
+export const AzureTableSource: msRest.CompositeMapper = {
+ serializedName: "AzureTableSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "AzureTableSource",
+ modelProperties: {
+ ...TabularSource.type.modelProperties,
+ azureTableSourceQuery: {
+ serializedName: "azureTableSourceQuery",
type: {
name: "Object"
}
},
- modifiedDatetimeEnd: {
- serializedName: "modifiedDatetimeEnd",
+ azureTableSourceIgnoreTableNotFound: {
+ serializedName: "azureTableSourceIgnoreTableNotFound",
type: {
name: "Object"
}
}
},
- additionalProperties: StoreReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
@@ -16662,6 +17421,14 @@ export const BinarySource: msRest.CompositeMapper = {
}
}
}
+ },
+ formatSettings: {
+ serializedName: "formatSettings",
+ type: {
+ name: "Composite",
+ className: "BinaryReadSettings",
+ additionalProperties: FormatReadSettings.type.additionalProperties
+ }
}
},
additionalProperties: CopySource.type.additionalProperties
@@ -16706,13 +17473,13 @@ export const OrcSource: msRest.CompositeMapper = {
}
};
-export const JsonSource: msRest.CompositeMapper = {
- serializedName: "JsonSource",
+export const XmlSource: msRest.CompositeMapper = {
+ serializedName: "XmlSource",
type: {
name: "Composite",
polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
uberParent: "CopySource",
- className: "JsonSource",
+ className: "XmlSource",
modelProperties: {
...CopySource.type.modelProperties,
storeSettings: {
@@ -16727,6 +17494,14 @@ export const JsonSource: msRest.CompositeMapper = {
}
}
},
+ formatSettings: {
+ serializedName: "formatSettings",
+ type: {
+ name: "Composite",
+ className: "XmlReadSettings",
+ additionalProperties: FormatReadSettings.type.additionalProperties
+ }
+ },
additionalColumns: {
serializedName: "additionalColumns",
type: {
@@ -16744,50 +17519,49 @@ export const JsonSource: msRest.CompositeMapper = {
}
};
-export const FormatReadSettings: msRest.CompositeMapper = {
- serializedName: "FormatReadSettings",
- type: {
- name: "Composite",
- polymorphicDiscriminator: {
- serializedName: "type",
- clientName: "type"
- },
- uberParent: "FormatReadSettings",
- className: "FormatReadSettings",
- modelProperties: {
- type: {
- required: true,
- serializedName: "type",
- type: {
- name: "String"
- }
- }
- },
- additionalProperties: {
- type: {
- name: "Object"
- }
- }
- }
-};
-
-export const DelimitedTextReadSettings: msRest.CompositeMapper = {
- serializedName: "DelimitedTextReadSettings",
+export const JsonSource: msRest.CompositeMapper = {
+ serializedName: "JsonSource",
type: {
name: "Composite",
- polymorphicDiscriminator: FormatReadSettings.type.polymorphicDiscriminator,
- uberParent: "FormatReadSettings",
- className: "DelimitedTextReadSettings",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "JsonSource",
modelProperties: {
- ...FormatReadSettings.type.modelProperties,
- skipLineCount: {
- serializedName: "skipLineCount",
+ ...CopySource.type.modelProperties,
+ storeSettings: {
+ serializedName: "storeSettings",
type: {
- name: "Object"
+ name: "Composite",
+ className: "StoreReadSettings",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ formatSettings: {
+ serializedName: "formatSettings",
+ type: {
+ name: "Composite",
+ className: "JsonReadSettings",
+ additionalProperties: FormatReadSettings.type.additionalProperties
+ }
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
}
}
},
- additionalProperties: FormatReadSettings.type.additionalProperties
+ additionalProperties: CopySource.type.additionalProperties
}
};
@@ -16875,6 +17649,44 @@ export const ParquetSource: msRest.CompositeMapper = {
}
};
+export const ExcelSource: msRest.CompositeMapper = {
+ serializedName: "ExcelSource",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySource.type.polymorphicDiscriminator,
+ uberParent: "CopySource",
+ className: "ExcelSource",
+ modelProperties: {
+ ...CopySource.type.modelProperties,
+ storeSettings: {
+ serializedName: "storeSettings",
+ type: {
+ name: "Composite",
+ className: "StoreReadSettings",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ additionalColumns: {
+ serializedName: "additionalColumns",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "AdditionalColumns"
+ }
+ }
+ }
+ }
+ },
+ additionalProperties: CopySource.type.additionalProperties
+ }
+};
+
export const AvroSource: msRest.CompositeMapper = {
serializedName: "AvroSource",
type: {
@@ -17062,6 +17874,18 @@ export const DeleteActivity: msRest.CompositeMapper = {
name: "Composite",
className: "DatasetReference"
}
+ },
+ storeSettings: {
+ serializedName: "typeProperties.storeSettings",
+ type: {
+ name: "Composite",
+ className: "StoreReadSettings",
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
}
},
additionalProperties: Activity.type.additionalProperties
@@ -17410,6 +18234,13 @@ export const SSISPackageLocation: msRest.CompositeMapper = {
name: "Object"
}
},
+ configurationAccessCredential: {
+ serializedName: "typeProperties.configurationAccessCredential",
+ type: {
+ name: "Composite",
+ className: "SSISAccessCredential"
+ }
+ },
packageName: {
serializedName: "typeProperties.packageName",
type: {
@@ -18512,6 +19343,97 @@ export const OracleSink: msRest.CompositeMapper = {
}
};
+export const ImportSettings: msRest.CompositeMapper = {
+ serializedName: "ImportSettings",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: {
+ serializedName: "type",
+ clientName: "type"
+ },
+ uberParent: "ImportSettings",
+ className: "ImportSettings",
+ modelProperties: {
+ type: {
+ required: true,
+ serializedName: "type",
+ type: {
+ name: "String"
+ }
+ }
+ },
+ additionalProperties: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+};
+
+export const SnowflakeImportCopyCommand: msRest.CompositeMapper = {
+ serializedName: "SnowflakeImportCopyCommand",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: ImportSettings.type.polymorphicDiscriminator,
+ uberParent: "ImportSettings",
+ className: "SnowflakeImportCopyCommand",
+ modelProperties: {
+ ...ImportSettings.type.modelProperties,
+ additionalCopyOptions: {
+ serializedName: "additionalCopyOptions",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ },
+ additionalFormatOptions: {
+ serializedName: "additionalFormatOptions",
+ type: {
+ name: "Dictionary",
+ value: {
+ type: {
+ name: "Object"
+ }
+ }
+ }
+ }
+ },
+ additionalProperties: ImportSettings.type.additionalProperties
+ }
+};
+
+export const SnowflakeSink: msRest.CompositeMapper = {
+ serializedName: "SnowflakeSink",
+ type: {
+ name: "Composite",
+ polymorphicDiscriminator: CopySink.type.polymorphicDiscriminator,
+ uberParent: "CopySink",
+ className: "SnowflakeSink",
+ modelProperties: {
+ ...CopySink.type.modelProperties,
+ preCopyScript: {
+ serializedName: "preCopyScript",
+ type: {
+ name: "Object"
+ }
+ },
+ importSettings: {
+ serializedName: "importSettings",
+ type: {
+ name: "Composite",
+ className: "SnowflakeImportCopyCommand",
+ additionalProperties: ImportSettings.type.additionalProperties
+ }
+ }
+ },
+ additionalProperties: CopySink.type.additionalProperties
+ }
+};
+
export const DWCopyCommandDefaultValue: msRest.CompositeMapper = {
serializedName: "DWCopyCommandDefaultValue",
type: {
@@ -19382,6 +20304,12 @@ export const SapCloudForCustomerSink: msRest.CompositeMapper = {
type: {
name: "String"
}
+ },
+ httpRequestTimeout: {
+ serializedName: "httpRequestTimeout",
+ type: {
+ name: "Object"
+ }
}
},
additionalProperties: CopySink.type.additionalProperties
@@ -20851,6 +21779,53 @@ export const SelfHostedIntegrationRuntime: msRest.CompositeMapper = {
}
};
+export const EntityReference: msRest.CompositeMapper = {
+ serializedName: "EntityReference",
+ type: {
+ name: "Composite",
+ className: "EntityReference",
+ modelProperties: {
+ type: {
+ serializedName: "type",
+ type: {
+ name: "String"
+ }
+ },
+ referenceName: {
+ serializedName: "referenceName",
+ type: {
+ name: "String"
+ }
+ }
+ }
+ }
+};
+
+export const PackageStore: msRest.CompositeMapper = {
+ serializedName: "PackageStore",
+ type: {
+ name: "Composite",
+ className: "PackageStore",
+ modelProperties: {
+ name: {
+ required: true,
+ serializedName: "name",
+ type: {
+ name: "String"
+ }
+ },
+ packageStoreLinkedService: {
+ required: true,
+ serializedName: "packageStoreLinkedService",
+ type: {
+ name: "Composite",
+ className: "EntityReference"
+ }
+ }
+ }
+ }
+};
+
export const CustomSetupBase: msRest.CompositeMapper = {
serializedName: "CustomSetupBase",
type: {
@@ -20962,28 +21937,6 @@ export const CmdkeySetup: msRest.CompositeMapper = {
}
};
-export const EntityReference: msRest.CompositeMapper = {
- serializedName: "EntityReference",
- type: {
- name: "Composite",
- className: "EntityReference",
- modelProperties: {
- type: {
- serializedName: "type",
- type: {
- name: "String"
- }
- },
- referenceName: {
- serializedName: "referenceName",
- type: {
- name: "String"
- }
- }
- }
- }
-};
-
export const IntegrationRuntimeDataProxyProperties: msRest.CompositeMapper = {
serializedName: "IntegrationRuntimeDataProxyProperties",
type: {
@@ -21136,6 +22089,18 @@ export const IntegrationRuntimeSsisProperties: msRest.CompositeMapper = {
}
}
}
+ },
+ packageStores: {
+ serializedName: "packageStores",
+ type: {
+ name: "Sequence",
+ element: {
+ type: {
+ name: "Composite",
+ className: "PackageStore"
+ }
+ }
+ }
}
},
additionalProperties: {
@@ -22243,6 +23208,8 @@ export const discriminators = {
'FactoryRepoConfiguration.FactoryGitHubConfiguration' : FactoryGitHubConfiguration,
'DataFlow' : DataFlow,
'DataFlow.MappingDataFlow' : MappingDataFlow,
+ 'LinkedService.SharePointOnlineList' : SharePointOnlineListLinkedService,
+ 'LinkedService.Snowflake' : SnowflakeLinkedService,
'LinkedService.AzureFunction' : AzureFunctionLinkedService,
'LinkedService.AzureDataExplorer' : AzureDataExplorerLinkedService,
'LinkedService.SapTable' : SapTableLinkedService,
@@ -22341,6 +23308,8 @@ export const discriminators = {
'LinkedService.AzureTableStorage' : AzureTableStorageLinkedService,
'LinkedService.AzureBlobStorage' : AzureBlobStorageLinkedService,
'LinkedService.AzureStorage' : AzureStorageLinkedService,
+ 'Dataset.SharePointOnlineListResource' : SharePointOnlineListResourceDataset,
+ 'Dataset.SnowflakeTable' : SnowflakeDataset,
'Dataset.GoogleAdWordsObject' : GoogleAdWordsObjectDataset,
'Dataset.AzureDataExplorerTable' : AzureDataExplorerTableDataset,
'Dataset.OracleServiceCloudObject' : OracleServiceCloudObjectDataset,
@@ -22446,9 +23415,11 @@ export const discriminators = {
'DatasetLocation' : DatasetLocation,
'Dataset.Binary' : BinaryDataset,
'Dataset.Orc' : OrcDataset,
+ 'Dataset.Xml' : XmlDataset,
'Dataset.Json' : JsonDataset,
'Dataset.DelimitedText' : DelimitedTextDataset,
'Dataset.Parquet' : ParquetDataset,
+ 'Dataset.Excel' : ExcelDataset,
'Dataset.Avro' : AvroDataset,
'Dataset.AmazonS3Object' : AmazonS3Dataset,
'Trigger.ChainingTrigger' : ChainingTrigger,
@@ -22471,8 +23442,31 @@ export const discriminators = {
'Activity.AzureMLExecutePipeline' : AzureMLExecutePipelineActivity,
'Activity.AzureMLUpdateResource' : AzureMLUpdateResourceActivity,
'Activity.AzureMLBatchExecution' : AzureMLBatchExecutionActivity,
+ 'CompressionReadSettings.ZipDeflateReadSettings' : ZipDeflateReadSettings,
+ 'CompressionReadSettings' : CompressionReadSettings,
+ 'FormatReadSettings.BinaryReadSettings' : BinaryReadSettings,
+ 'FormatReadSettings.XmlReadSettings' : XmlReadSettings,
+ 'FormatReadSettings.JsonReadSettings' : JsonReadSettings,
+ 'FormatReadSettings.DelimitedTextReadSettings' : DelimitedTextReadSettings,
+ 'FormatReadSettings' : FormatReadSettings,
+ 'StoreReadSettings.HdfsReadSettings' : HdfsReadSettings,
+ 'StoreReadSettings.HttpReadSettings' : HttpReadSettings,
+ 'StoreReadSettings.SftpReadSettings' : SftpReadSettings,
+ 'StoreReadSettings.FtpReadSettings' : FtpReadSettings,
+ 'StoreReadSettings.GoogleCloudStorageReadSettings' : GoogleCloudStorageReadSettings,
+ 'StoreReadSettings.AzureFileStorageReadSettings' : AzureFileStorageReadSettings,
+ 'StoreReadSettings.FileServerReadSettings' : FileServerReadSettings,
+ 'StoreReadSettings.AmazonS3ReadSettings' : AmazonS3ReadSettings,
+ 'StoreReadSettings.AzureDataLakeStoreReadSettings' : AzureDataLakeStoreReadSettings,
+ 'StoreReadSettings.AzureBlobFSReadSettings' : AzureBlobFSReadSettings,
+ 'StoreReadSettings.AzureBlobStorageReadSettings' : AzureBlobStorageReadSettings,
+ 'StoreReadSettings' : StoreReadSettings,
'Activity.GetMetadata' : GetMetadataActivity,
'Activity.WebActivity' : WebActivity,
+ 'CopySource.SharePointOnlineListSource' : SharePointOnlineListSource,
+ 'ExportSettings' : ExportSettings,
+ 'ExportSettings.SnowflakeExportCopyCommand' : SnowflakeExportCopyCommand,
+ 'CopySource.SnowflakeSource' : SnowflakeSource,
'CopySource.HttpSource' : HttpSource,
'CopySource.AzureBlobFSSource' : AzureBlobFSSource,
'CopySource.AzureDataLakeStoreSource' : AzureDataLakeStoreSource,
@@ -22554,25 +23548,13 @@ export const discriminators = {
'CopySource.InformixSource' : InformixSource,
'CopySource.AzureTableSource' : AzureTableSource,
'CopySource.TabularSource' : TabularSource,
- 'StoreReadSettings.HdfsReadSettings' : HdfsReadSettings,
- 'StoreReadSettings.HttpReadSettings' : HttpReadSettings,
- 'StoreReadSettings.SftpReadSettings' : SftpReadSettings,
- 'StoreReadSettings.FtpReadSettings' : FtpReadSettings,
- 'StoreReadSettings.GoogleCloudStorageReadSettings' : GoogleCloudStorageReadSettings,
- 'StoreReadSettings.AzureFileStorageReadSettings' : AzureFileStorageReadSettings,
- 'StoreReadSettings.FileServerReadSettings' : FileServerReadSettings,
- 'StoreReadSettings.AmazonS3ReadSettings' : AmazonS3ReadSettings,
- 'StoreReadSettings.AzureDataLakeStoreReadSettings' : AzureDataLakeStoreReadSettings,
- 'StoreReadSettings.AzureBlobFSReadSettings' : AzureBlobFSReadSettings,
- 'StoreReadSettings.AzureBlobStorageReadSettings' : AzureBlobStorageReadSettings,
- 'StoreReadSettings' : StoreReadSettings,
'CopySource.BinarySource' : BinarySource,
'CopySource.OrcSource' : OrcSource,
+ 'CopySource.XmlSource' : XmlSource,
'CopySource.JsonSource' : JsonSource,
- 'FormatReadSettings' : FormatReadSettings,
- 'FormatReadSettings.DelimitedTextReadSettings' : DelimitedTextReadSettings,
'CopySource.DelimitedTextSource' : DelimitedTextSource,
'CopySource.ParquetSource' : ParquetSource,
+ 'CopySource.ExcelSource' : ExcelSource,
'CopySource.AvroSource' : AvroSource,
'CopySource' : CopySource,
'Activity.Lookup' : LookupActivity,
@@ -22600,6 +23582,9 @@ export const discriminators = {
'CopySink.AzureBlobFSSink' : AzureBlobFSSink,
'CopySink.AzureDataLakeStoreSink' : AzureDataLakeStoreSink,
'CopySink.OracleSink' : OracleSink,
+ 'ImportSettings' : ImportSettings,
+ 'ImportSettings.SnowflakeImportCopyCommand' : SnowflakeImportCopyCommand,
+ 'CopySink.SnowflakeSink' : SnowflakeSink,
'CopySink.SqlDWSink' : SqlDWSink,
'CopySink.SqlMISink' : SqlMISink,
'CopySink.AzureSqlSink' : AzureSqlSink,
diff --git a/sdk/datafactory/arm-datafactory/src/models/pipelinesMappers.ts b/sdk/datafactory/arm-datafactory/src/models/pipelinesMappers.ts
index 1b25f3169331..84ef625cfcc5 100644
--- a/sdk/datafactory/arm-datafactory/src/models/pipelinesMappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/pipelinesMappers.ts
@@ -98,6 +98,7 @@ export {
AzureTableStorageLinkedService,
BaseResource,
BinaryDataset,
+ BinaryReadSettings,
BinarySink,
BinarySource,
BlobEventsTrigger,
@@ -115,6 +116,7 @@ export {
CommonDataServiceForAppsSink,
CommonDataServiceForAppsSource,
ComponentSetup,
+ CompressionReadSettings,
ConcurLinkedService,
ConcurObjectDataset,
ConcurSource,
@@ -196,11 +198,14 @@ export {
EloquaSource,
EntityReference,
EnvironmentVariableSetup,
+ ExcelDataset,
+ ExcelSource,
ExecuteDataFlowActivity,
ExecuteDataFlowActivityTypePropertiesCompute,
ExecutePipelineActivity,
ExecuteSSISPackageActivity,
ExecutionActivity,
+ ExportSettings,
Expression,
Factory,
FactoryGitHubConfiguration,
@@ -263,6 +268,7 @@ export {
ImpalaLinkedService,
ImpalaObjectDataset,
ImpalaSource,
+ ImportSettings,
InformixLinkedService,
InformixSink,
InformixSource,
@@ -282,6 +288,7 @@ export {
JiraSource,
JsonDataset,
JsonFormat,
+ JsonReadSettings,
JsonSink,
JsonSource,
JsonWriteSettings,
@@ -345,6 +352,7 @@ export {
OrcFormat,
OrcSink,
OrcSource,
+ PackageStore,
ParameterSpecification,
ParquetDataset,
ParquetFormat,
@@ -432,10 +440,19 @@ export {
SftpReadSettings,
SftpServerLinkedService,
SftpWriteSettings,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
+ SharePointOnlineListSource,
ShopifyLinkedService,
ShopifyObjectDataset,
ShopifySource,
SkipErrorFile,
+ SnowflakeDataset,
+ SnowflakeExportCopyCommand,
+ SnowflakeImportCopyCommand,
+ SnowflakeLinkedService,
+ SnowflakeSink,
+ SnowflakeSource,
SparkLinkedService,
SparkObjectDataset,
SparkSource,
@@ -505,6 +522,10 @@ export {
XeroLinkedService,
XeroObjectDataset,
XeroSource,
+ XmlDataset,
+ XmlReadSettings,
+ XmlSource,
+ ZipDeflateReadSettings,
ZohoLinkedService,
ZohoObjectDataset,
ZohoSource
diff --git a/sdk/datafactory/arm-datafactory/src/models/triggersMappers.ts b/sdk/datafactory/arm-datafactory/src/models/triggersMappers.ts
index c6cb571e82c9..a3ae2add8b70 100644
--- a/sdk/datafactory/arm-datafactory/src/models/triggersMappers.ts
+++ b/sdk/datafactory/arm-datafactory/src/models/triggersMappers.ts
@@ -98,6 +98,7 @@ export {
AzureTableStorageLinkedService,
BaseResource,
BinaryDataset,
+ BinaryReadSettings,
BinarySink,
BinarySource,
BlobEventsTrigger,
@@ -115,6 +116,7 @@ export {
CommonDataServiceForAppsSink,
CommonDataServiceForAppsSource,
ComponentSetup,
+ CompressionReadSettings,
ConcurLinkedService,
ConcurObjectDataset,
ConcurSource,
@@ -195,11 +197,14 @@ export {
EloquaSource,
EntityReference,
EnvironmentVariableSetup,
+ ExcelDataset,
+ ExcelSource,
ExecuteDataFlowActivity,
ExecuteDataFlowActivityTypePropertiesCompute,
ExecutePipelineActivity,
ExecuteSSISPackageActivity,
ExecutionActivity,
+ ExportSettings,
Expression,
Factory,
FactoryGitHubConfiguration,
@@ -262,6 +267,7 @@ export {
ImpalaLinkedService,
ImpalaObjectDataset,
ImpalaSource,
+ ImportSettings,
InformixLinkedService,
InformixSink,
InformixSource,
@@ -281,6 +287,7 @@ export {
JiraSource,
JsonDataset,
JsonFormat,
+ JsonReadSettings,
JsonSink,
JsonSource,
JsonWriteSettings,
@@ -344,6 +351,7 @@ export {
OrcFormat,
OrcSink,
OrcSource,
+ PackageStore,
ParameterSpecification,
ParquetDataset,
ParquetFormat,
@@ -430,10 +438,19 @@ export {
SftpReadSettings,
SftpServerLinkedService,
SftpWriteSettings,
+ SharePointOnlineListLinkedService,
+ SharePointOnlineListResourceDataset,
+ SharePointOnlineListSource,
ShopifyLinkedService,
ShopifyObjectDataset,
ShopifySource,
SkipErrorFile,
+ SnowflakeDataset,
+ SnowflakeExportCopyCommand,
+ SnowflakeImportCopyCommand,
+ SnowflakeLinkedService,
+ SnowflakeSink,
+ SnowflakeSource,
SparkLinkedService,
SparkObjectDataset,
SparkSource,
@@ -507,6 +524,10 @@ export {
XeroLinkedService,
XeroObjectDataset,
XeroSource,
+ XmlDataset,
+ XmlReadSettings,
+ XmlSource,
+ ZipDeflateReadSettings,
ZohoLinkedService,
ZohoObjectDataset,
ZohoSource