From a212d72c674009a6ecd9f1d9cd529094f2522de7 Mon Sep 17 00:00:00 2001 From: Azure SDK Bot Date: Wed, 25 Sep 2019 04:39:57 +0000 Subject: [PATCH 1/2] Generated from bba298ece227fa2df07290dce6ce056d546c2259 [DataFactory]Add AzureFileStorage and GoogleCloudStorage --- .../azure/mgmt/datafactory/models/__init__.py | 18 + .../azure/mgmt/datafactory/models/_models.py | 355 ++++++++++++++++- .../mgmt/datafactory/models/_models_py3.py | 357 +++++++++++++++++- 3 files changed, 711 insertions(+), 19 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index d947b8055c7b..2d883a7db8f3 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -60,6 +60,9 @@ from ._models_py3 import AzureDataLakeStoreSink from ._models_py3 import AzureDataLakeStoreSource from ._models_py3 import AzureDataLakeStoreWriteSettings + from ._models_py3 import AzureFileStorageLinkedService + from ._models_py3 import AzureFileStorageLocation + from ._models_py3 import AzureFileStorageReadSettings from ._models_py3 import AzureFunctionActivity from ._models_py3 import AzureFunctionLinkedService from ._models_py3 import AzureKeyVaultLinkedService @@ -242,6 +245,9 @@ from ._models_py3 import GoogleBigQueryLinkedService from ._models_py3 import GoogleBigQueryObjectDataset from ._models_py3 import GoogleBigQuerySource + from ._models_py3 import GoogleCloudStorageLinkedService + from ._models_py3 import GoogleCloudStorageLocation + from ._models_py3 import GoogleCloudStorageReadSettings from ._models_py3 import GreenplumLinkedService from ._models_py3 import GreenplumSource from ._models_py3 import GreenplumTableDataset @@ -610,6 +616,9 @@ from ._models import AzureDataLakeStoreSink from ._models import AzureDataLakeStoreSource from ._models import AzureDataLakeStoreWriteSettings + from ._models import AzureFileStorageLinkedService + from ._models import AzureFileStorageLocation + from ._models import AzureFileStorageReadSettings from ._models import AzureFunctionActivity from ._models import AzureFunctionLinkedService from ._models import AzureKeyVaultLinkedService @@ -792,6 +801,9 @@ from ._models import GoogleBigQueryLinkedService from ._models import GoogleBigQueryObjectDataset from ._models import GoogleBigQuerySource + from ._models import GoogleCloudStorageLinkedService + from ._models import GoogleCloudStorageLocation + from ._models import GoogleCloudStorageReadSettings from ._models import GreenplumLinkedService from ._models import GreenplumSource from ._models import GreenplumTableDataset @@ -1246,6 +1258,9 @@ 'AzureDataLakeStoreSink', 'AzureDataLakeStoreSource', 'AzureDataLakeStoreWriteSettings', + 'AzureFileStorageLinkedService', + 'AzureFileStorageLocation', + 'AzureFileStorageReadSettings', 'AzureFunctionActivity', 'AzureFunctionLinkedService', 'AzureKeyVaultLinkedService', @@ -1428,6 +1443,9 @@ 'GoogleBigQueryLinkedService', 'GoogleBigQueryObjectDataset', 'GoogleBigQuerySource', + 'GoogleCloudStorageLinkedService', + 'GoogleCloudStorageLocation', + 'GoogleCloudStorageReadSettings', 'GreenplumLinkedService', 'GreenplumSource', 'GreenplumTableDataset', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index b7d729eb84dc..b328607503b8 
100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -336,14 +336,15 @@ class LinkedService(Model): MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, - AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, CommonDataServiceForAppsLinkedService, - DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, - AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService + AzureMySqlLinkedService, OracleLinkedService, + GoogleCloudStorageLinkedService, AzureFileStorageLinkedService, + FileServerLinkedService, HDInsightLinkedService, + CommonDataServiceForAppsLinkedService, DynamicsCrmLinkedService, + DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, + AzureBatchLinkedService, AzureSqlMILinkedService, + AzureSqlDatabaseLinkedService, SqlServerLinkedService, + AzureSqlDWLinkedService, AzureTableStorageLinkedService, + AzureBlobStorageLinkedService, AzureStorageLinkedService All required parameters must be populated in order to send to Azure. @@ -379,7 +380,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 
'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 
'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, **kwargs): @@ -3663,6 +3664,162 @@ def __init__(self, **kwargs): self.type = 'AzureDataLakeStoreWriteSettings' +class AzureFileStorageLinkedService(LinkedService): + """Azure File Storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name of the server. Type: string (or + Expression with resultType string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression + with resultType string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. 
Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureFileStorageLinkedService, self).__init__(**kwargs) + self.host = kwargs.get('host', None) + self.user_id = kwargs.get('user_id', None) + self.password = kwargs.get('password', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AzureFileStorage' + + +class AzureFileStorageLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureFileStorageLocation, self).__init__(**kwargs) + + +class AzureFileStorageReadSettings(StoreReadSettings): + """Azure File Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: + string (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Azure File Storage wildcardFileName. Type: + string (or Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. 
+ :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AzureFileStorageReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + class AzureFunctionActivity(ExecutionActivity): """Azure Function activity. @@ -4525,6 +4682,9 @@ class AzureMySqlTableDataset(Dataset): :param table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). :type table_name: object + :param table: The name of Azure MySQL database table. Type: string (or + Expression with resultType string). + :type table: object """ _validation = { @@ -4543,11 +4703,13 @@ class AzureMySqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } def __init__(self, **kwargs): super(AzureMySqlTableDataset, self).__init__(**kwargs) self.table_name = kwargs.get('table_name', None) + self.table = kwargs.get('table', None) self.type = 'AzureMySqlTable' @@ -13348,6 +13510,181 @@ def __init__(self, **kwargs): self.type = 'GoogleBigQuerySource' +class GoogleCloudStorageLinkedService(LinkedService): + """Linked service for Google Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. 
+ :type type: str + :param access_key_id: The access key identifier of the Google Cloud + Storage Identity and Access Management (IAM) user. Type: string (or + Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Google Cloud + Storage Identity and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + Google Cloud Storage Connector. This is an optional property; change it + only if you want to try a different service endpoint or want to switch + between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleCloudStorageLinkedService, self).__init__(**kwargs) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'GoogleCloudStorage' + + +class GoogleCloudStorageLocation(DatasetLocation): + """The location of Google Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: + string (or Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of Google Cloud Storage. Type: string + (or Expression with resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleCloudStorageLocation, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) + + +class GoogleCloudStorageReadSettings(StoreReadSettings): + """Google Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. + Type: string (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: + string (or Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Google Cloud Storage object name. + Type: string (or Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(GoogleCloudStorageReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + + class GreenplumLinkedService(LinkedService): """Greenplum Database linked service. diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index c72789617866..6576d4566078 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -336,14 +336,15 @@ class LinkedService(Model): MicrosoftAccessLinkedService, InformixLinkedService, OdbcLinkedService, AzureMLLinkedService, TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, - AzureMySqlLinkedService, OracleLinkedService, FileServerLinkedService, - HDInsightLinkedService, CommonDataServiceForAppsLinkedService, - DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, - AzureKeyVaultLinkedService, AzureBatchLinkedService, - AzureSqlMILinkedService, AzureSqlDatabaseLinkedService, - SqlServerLinkedService, AzureSqlDWLinkedService, - AzureTableStorageLinkedService, AzureBlobStorageLinkedService, - AzureStorageLinkedService + AzureMySqlLinkedService, OracleLinkedService, + GoogleCloudStorageLinkedService, AzureFileStorageLinkedService, + FileServerLinkedService, HDInsightLinkedService, + CommonDataServiceForAppsLinkedService, DynamicsCrmLinkedService, + DynamicsLinkedService, CosmosDbLinkedService, AzureKeyVaultLinkedService, + AzureBatchLinkedService, AzureSqlMILinkedService, + AzureSqlDatabaseLinkedService, SqlServerLinkedService, + AzureSqlDWLinkedService, AzureTableStorageLinkedService, + AzureBlobStorageLinkedService, AzureStorageLinkedService All required parameters must be populated in order to send to Azure. 
@@ -379,7 +380,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 
'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 
'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: @@ -3663,6 +3664,162 @@ def __init__(self, *, additional_properties=None, max_concurrent_connections=Non self.type = 'AzureDataLakeStoreWriteSettings' +class AzureFileStorageLinkedService(LinkedService): + """Azure File Storage linked service. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param host: Required. Host name of the server. Type: string (or + Expression with resultType string). + :type host: object + :param user_id: User ID to logon the server. Type: string (or Expression + with resultType string). + :type user_id: object + :param password: Password to logon the server. + :type password: ~azure.mgmt.datafactory.models.SecretBase + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + 'host': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'host': {'key': 'typeProperties.host', 'type': 'object'}, + 'user_id': {'key': 'typeProperties.userId', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, host, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_id=None, password=None, encrypted_credential=None, **kwargs) -> None: + super(AzureFileStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.host = host + self.user_id = user_id + self.password = password + self.encrypted_credential = encrypted_credential + self.type = 'AzureFileStorage' + + +class AzureFileStorageLocation(DatasetLocation): + """The location of file server dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(AzureFileStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + + +class AzureFileStorageReadSettings(StoreReadSettings): + """Azure File Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Azure File Storage wildcardFolderPath. Type: + string (or Expression with resultType string). 
+ :type wildcard_folder_path: object + :param wildcard_file_name: Azure File Storage wildcardFileName. Type: + string (or Expression with resultType string). + :type wildcard_file_name: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AzureFileStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + class AzureFunctionActivity(ExecutionActivity): """Azure Function activity. @@ -4525,6 +4682,9 @@ class AzureMySqlTableDataset(Dataset): :param table_name: The Azure MySQL database table name. Type: string (or Expression with resultType string). :type table_name: object + :param table: The name of Azure MySQL database table. Type: string (or + Expression with resultType string). 
+ :type table: object """ _validation = { @@ -4543,11 +4703,13 @@ class AzureMySqlTableDataset(Dataset): 'folder': {'key': 'folder', 'type': 'DatasetFolder'}, 'type': {'key': 'type', 'type': 'str'}, 'table_name': {'key': 'typeProperties.tableName', 'type': 'object'}, + 'table': {'key': 'typeProperties.table', 'type': 'object'}, } - def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, **kwargs) -> None: + def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, table_name=None, table=None, **kwargs) -> None: super(AzureMySqlTableDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs) self.table_name = table_name + self.table = table self.type = 'AzureMySqlTable' @@ -13348,6 +13510,181 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'GoogleBigQuerySource' +class GoogleCloudStorageLinkedService(LinkedService): + """Linked service for Google Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Google Cloud + Storage Identity and Access Management (IAM) user. Type: string (or + Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Google Cloud + Storage Identity and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + Google Cloud Storage Connector. This is an optional property; change it + only if you want to try a different service endpoint or want to switch + between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: + super(GoogleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + self.type = 'GoogleCloudStorage' + + +class GoogleCloudStorageLocation(DatasetLocation): + """The location of Google Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. Type of dataset storage location. + :type type: str + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param bucket_name: Specify the bucketName of Google Cloud Storage. Type: + string (or Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of Google Cloud Storage. Type: string + (or Expression with resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: + super(GoogleCloudStorageLocation, self).__init__(additional_properties=additional_properties, type=type, folder_path=folder_path, file_name=file_name, **kwargs) + self.bucket_name = bucket_name + self.version = version + + +class GoogleCloudStorageReadSettings(StoreReadSettings): + """Google Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param type: Required. The read setting type. + :type type: str + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Google Cloud Storage wildcardFolderPath. + Type: string (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Google Cloud Storage wildcardFileName. Type: + string (or Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Google Cloud Storage object name. + Type: string (or Expression with resultType string). + :type prefix: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, type: str, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, enable_partition_discovery: bool=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(GoogleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, type=type, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.enable_partition_discovery = enable_partition_discovery + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + + class GreenplumLinkedService(LinkedService): """Greenplum Database linked service. 
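
The first patch is purely generated model surface, so a usage sketch helps show what it adds. Below is a minimal example of constructing the new Azure File Storage models, assuming the package is built with this change; SecureString is the existing SecretBase subclass from the same models package and is an assumption here, since this diff does not touch it.

from azure.mgmt.datafactory.models import (
    AzureFileStorageLinkedService,
    AzureFileStorageLocation,
    AzureFileStorageReadSettings,
    SecureString,  # existing SecretBase subclass; assumed, not part of this diff
)

# Linked service: 'host' is the only required type property; the 'type'
# discriminator is filled in by the constructor as 'AzureFileStorage'.
linked_service = AzureFileStorageLinkedService(
    host='\\\\myaccount.file.core.windows.net\\myshare',
    user_id='AZURE\\myaccount',
    password=SecureString(value='<storage-account-key>'),
)

# Dataset location: 'type' is the required DatasetLocation discriminator.
location = AzureFileStorageLocation(
    type='AzureFileStorageLocation',
    folder_path='input/2019',
    file_name='data.csv',
)

# Copy-source read settings: everything except 'type' is optional, and most
# fields accept either a literal or an ADF expression object.
read_settings = AzureFileStorageReadSettings(
    type='AzureFileStorageReadSettings',
    recursive=True,
    wildcard_file_name='*.csv',
    modified_datetime_start='2019-09-01T00:00:00Z',
)

assert linked_service.type == 'AzureFileStorage'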
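
The matching Google Cloud Storage models follow the same shape, with two connector-specific extras: bucketName/version on the location and a prefix filter on the read settings. A sketch under the same assumptions as above; the service_url value shown is the conventional GCS endpoint and is illustrative, since this property is optional per the docstring.

from azure.mgmt.datafactory.models import (
    GoogleCloudStorageLinkedService,
    GoogleCloudStorageLocation,
    GoogleCloudStorageReadSettings,
    SecureString,  # assumed, as above
)

# The connector authenticates with the HMAC-style access keys of a GCS IAM
# user; serviceUrl only needs changing to switch endpoint or http/https.
linked_service = GoogleCloudStorageLinkedService(
    access_key_id='<access-key-id>',
    secret_access_key=SecureString(value='<secret-access-key>'),
    service_url='https://storage.googleapis.com',  # illustrative endpoint
)

# Location adds bucketName/version on top of the common folderPath/fileName
# inherited from DatasetLocation.
location = GoogleCloudStorageLocation(
    type='GoogleCloudStorageLocation',
    bucket_name='my-bucket',
    folder_path='raw/events',
)

# Read settings add a 'prefix' object-name filter that the file-share
# connector above does not have.
read_settings = GoogleCloudStorageReadSettings(
    type='GoogleCloudStorageReadSettings',
    prefix='raw/events/2019-09',
    enable_partition_discovery=False,
)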
From efc4e5ea16d2a9bf62a4323437b2eaf29955db07 Mon Sep 17 00:00:00 2001 From: Azure SDK Bot Date: Sun, 29 Sep 2019 03:22:20 +0000 Subject: [PATCH 2/2] Generated from bba298ece227fa2df07290dce6ce056d546c2259 [DataFactory]Add AzureFileStorage and GoogleCloudStorage --- .../azure/mgmt/datafactory/models/__init__.py | 5 + .../_data_factory_management_client_enums.py | 7 + .../azure/mgmt/datafactory/models/_models.py | 59 +++++-- .../mgmt/datafactory/models/_models_py3.py | 71 +++++--- .../_data_flow_debug_session_operations.py | 164 +++++++++++++----- 5 files changed, 226 insertions(+), 80 deletions(-) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 2d883a7db8f3..12b561be099f 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -289,6 +289,7 @@ from ._models_py3 import IntegrationRuntimeComputeProperties from ._models_py3 import IntegrationRuntimeConnectionInfo from ._models_py3 import IntegrationRuntimeCustomSetupScriptProperties + from ._models_py3 import IntegrationRuntimeDataFlowProperties from ._models_py3 import IntegrationRuntimeDataProxyProperties from ._models_py3 import IntegrationRuntimeMonitoringData from ._models_py3 import IntegrationRuntimeNodeIpAddress @@ -845,6 +846,7 @@ from ._models import IntegrationRuntimeComputeProperties from ._models import IntegrationRuntimeConnectionInfo from ._models import IntegrationRuntimeCustomSetupScriptProperties + from ._models import IntegrationRuntimeDataFlowProperties from ._models import IntegrationRuntimeDataProxyProperties from ._models import IntegrationRuntimeMonitoringData from ._models import IntegrationRuntimeNodeIpAddress @@ -1203,6 +1205,7 @@ IntegrationRuntimeSsisCatalogPricingTier, IntegrationRuntimeLicenseType, IntegrationRuntimeEdition, + DataFlowComputeType, SsisObjectMetadataType, IntegrationRuntimeAuthKeyName, ) @@ -1487,6 +1490,7 @@ 'IntegrationRuntimeComputeProperties', 'IntegrationRuntimeConnectionInfo', 'IntegrationRuntimeCustomSetupScriptProperties', + 'IntegrationRuntimeDataFlowProperties', 'IntegrationRuntimeDataProxyProperties', 'IntegrationRuntimeMonitoringData', 'IntegrationRuntimeNodeIpAddress', @@ -1844,6 +1848,7 @@ 'IntegrationRuntimeSsisCatalogPricingTier', 'IntegrationRuntimeLicenseType', 'IntegrationRuntimeEdition', + 'DataFlowComputeType', 'SsisObjectMetadataType', 'IntegrationRuntimeAuthKeyName', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 052712638b66..e3abf23ceae4 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -552,6 +552,13 @@ class IntegrationRuntimeEdition(str, Enum): enterprise = "Enterprise" +class DataFlowComputeType(str, Enum): + + general = "General" + memory_optimized = "MemoryOptimized" + compute_optimized = "ComputeOptimized" + + class SsisObjectMetadataType(str, Enum): folder = "Folder" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index b328607503b8..70b3013850f8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -9144,9 +9144,6 @@ class Transformation(Model): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] :param name: Required. Transformation name. :type name: str :param description: Transformation description. @@ -9158,14 +9155,12 @@ class Transformation(Model): } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } def __init__(self, **kwargs): super(Transformation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) self.name = kwargs.get('name', None) self.description = kwargs.get('description', None) @@ -9175,9 +9170,6 @@ class DataFlowSink(Transformation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] :param name: Required. Transformation name. :type name: str :param description: Transformation description. @@ -9191,7 +9183,6 @@ class DataFlowSink(Transformation): } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, @@ -9199,7 +9190,6 @@ class DataFlowSink(Transformation): def __init__(self, **kwargs): super(DataFlowSink, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) self.dataset = kwargs.get('dataset', None) @@ -9208,9 +9198,6 @@ class DataFlowSource(Transformation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] :param name: Required. Transformation name. :type name: str :param description: Transformation description. @@ -9224,7 +9211,6 @@ class DataFlowSource(Transformation): } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, @@ -9232,7 +9218,6 @@ class DataFlowSource(Transformation): def __init__(self, **kwargs): super(DataFlowSource, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) self.dataset = kwargs.get('dataset', None) @@ -16411,6 +16396,10 @@ class IntegrationRuntimeComputeProperties(Model): :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed integration runtime. :type max_parallel_executions_per_node: int + :param data_flow_properties: Data flow properties for managed integration + runtime. + :type data_flow_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties :param v_net_properties: VNet properties for managed integration runtime. 
    :type v_net_properties:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties

@@ -16427,6 +16416,7 @@ class IntegrationRuntimeComputeProperties(Model):
         'node_size': {'key': 'nodeSize', 'type': 'str'},
         'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
         'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
+        'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'},
         'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
     }

@@ -16437,6 +16427,7 @@ def __init__(self, **kwargs):
         self.node_size = kwargs.get('node_size', None)
         self.number_of_nodes = kwargs.get('number_of_nodes', None)
         self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None)
+        self.data_flow_properties = kwargs.get('data_flow_properties', None)
         self.v_net_properties = kwargs.get('v_net_properties', None)


@@ -16519,6 +16510,44 @@ def __init__(self, **kwargs):
         self.sas_token = kwargs.get('sas_token', None)


+class IntegrationRuntimeDataFlowProperties(Model):
+    """Data flow properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param compute_type: Compute type of the cluster which will execute the
+     data flow job. Possible values include: 'General', 'MemoryOptimized',
+     'ComputeOptimized'
+    :type compute_type: str or
+     ~azure.mgmt.datafactory.models.DataFlowComputeType
+    :param core_count: Core count of the cluster which will execute the data
+     flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
+    :type core_count: int
+    :param time_to_live: Time to live (in minutes) setting of the cluster
+     which will execute the data flow job.
+    :type time_to_live: int
+    """
+
+    _validation = {
+        'time_to_live': {'minimum': 0},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'core_count': {'key': 'coreCount', 'type': 'int'},
+        'time_to_live': {'key': 'timeToLive', 'type': 'int'},
+    }
+
+    def __init__(self, **kwargs):
+        super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.compute_type = kwargs.get('compute_type', None)
+        self.core_count = kwargs.get('core_count', None)
+        self.time_to_live = kwargs.get('time_to_live', None)
+
+
 class IntegrationRuntimeDataProxyProperties(Model):
     """Data proxy properties for a managed dedicated integration runtime.

diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
index 6576d4566078..c2948234f28d 100644
--- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
+++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py
@@ -9144,9 +9144,6 @@ class Transformation(Model):

     All required parameters must be populated in order to send to Azure.

-    :param additional_properties: Unmatched properties from the message are
-     deserialized this collection
-    :type additional_properties: dict[str, object]
     :param name: Required. Transformation name.
     :type name: str
     :param description: Transformation description.
@@ -9158,14 +9155,12 @@ class Transformation(Model): } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } - def __init__(self, *, name: str, additional_properties=None, description: str=None, **kwargs) -> None: + def __init__(self, *, name: str, description: str=None, **kwargs) -> None: super(Transformation, self).__init__(**kwargs) - self.additional_properties = additional_properties self.name = name self.description = description @@ -9175,9 +9170,6 @@ class DataFlowSink(Transformation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] :param name: Required. Transformation name. :type name: str :param description: Transformation description. @@ -9191,15 +9183,13 @@ class DataFlowSink(Transformation): } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, } - def __init__(self, *, name: str, additional_properties=None, description: str=None, dataset=None, **kwargs) -> None: - super(DataFlowSink, self).__init__(additional_properties=additional_properties, name=name, description=description, **kwargs) - self.additional_properties = additional_properties + def __init__(self, *, name: str, description: str=None, dataset=None, **kwargs) -> None: + super(DataFlowSink, self).__init__(name=name, description=description, **kwargs) self.dataset = dataset @@ -9208,9 +9198,6 @@ class DataFlowSource(Transformation): All required parameters must be populated in order to send to Azure. - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] :param name: Required. Transformation name. :type name: str :param description: Transformation description. @@ -9224,15 +9211,13 @@ class DataFlowSource(Transformation): } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, 'name': {'key': 'name', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'dataset': {'key': 'dataset', 'type': 'DatasetReference'}, } - def __init__(self, *, name: str, additional_properties=None, description: str=None, dataset=None, **kwargs) -> None: - super(DataFlowSource, self).__init__(additional_properties=additional_properties, name=name, description=description, **kwargs) - self.additional_properties = additional_properties + def __init__(self, *, name: str, description: str=None, dataset=None, **kwargs) -> None: + super(DataFlowSource, self).__init__(name=name, description=description, **kwargs) self.dataset = dataset @@ -16411,6 +16396,10 @@ class IntegrationRuntimeComputeProperties(Model): :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed integration runtime. :type max_parallel_executions_per_node: int + :param data_flow_properties: Data flow properties for managed integration + runtime. + :type data_flow_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties :param v_net_properties: VNet properties for managed integration runtime. 
    :type v_net_properties:
     ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties

@@ -16427,16 +16416,18 @@ class IntegrationRuntimeComputeProperties(Model):
         'node_size': {'key': 'nodeSize', 'type': 'str'},
         'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
         'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
+        'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'},
         'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
     }

-    def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, v_net_properties=None, **kwargs) -> None:
+    def __init__(self, *, additional_properties=None, location: str=None, node_size: str=None, number_of_nodes: int=None, max_parallel_executions_per_node: int=None, data_flow_properties=None, v_net_properties=None, **kwargs) -> None:
         super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs)
         self.additional_properties = additional_properties
         self.location = location
         self.node_size = node_size
         self.number_of_nodes = number_of_nodes
         self.max_parallel_executions_per_node = max_parallel_executions_per_node
+        self.data_flow_properties = data_flow_properties
         self.v_net_properties = v_net_properties


@@ -16519,6 +16510,44 @@ def __init__(self, *, blob_container_uri: str=None, sas_token=None, **kwargs) ->
         self.sas_token = sas_token


+class IntegrationRuntimeDataFlowProperties(Model):
+    """Data flow properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are
+     deserialized to this collection
+    :type additional_properties: dict[str, object]
+    :param compute_type: Compute type of the cluster which will execute the
+     data flow job. Possible values include: 'General', 'MemoryOptimized',
+     'ComputeOptimized'
+    :type compute_type: str or
+     ~azure.mgmt.datafactory.models.DataFlowComputeType
+    :param core_count: Core count of the cluster which will execute the data
+     flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
+    :type core_count: int
+    :param time_to_live: Time to live (in minutes) setting of the cluster
+     which will execute the data flow job.
+    :type time_to_live: int
+    """
+
+    _validation = {
+        'time_to_live': {'minimum': 0},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'compute_type': {'key': 'computeType', 'type': 'str'},
+        'core_count': {'key': 'coreCount', 'type': 'int'},
+        'time_to_live': {'key': 'timeToLive', 'type': 'int'},
+    }
+
+    def __init__(self, *, additional_properties=None, compute_type=None, core_count: int=None, time_to_live: int=None, **kwargs) -> None:
+        super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.compute_type = compute_type
+        self.core_count = core_count
+        self.time_to_live = time_to_live
+
+
 class IntegrationRuntimeDataProxyProperties(Model):
     """Data proxy properties for a managed dedicated integration runtime.
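The two models above compose as plain keyword arguments; a minimal sketch of declaring a data flow cluster on a managed integration runtime (the location and sizing values are illustrative):

    from azure.mgmt.datafactory.models import (
        DataFlowComputeType,
        IntegrationRuntimeComputeProperties,
        IntegrationRuntimeDataFlowProperties,
    )

    # Illustrative: an 8-core, memory-optimized data flow cluster reclaimed
    # after 10 idle minutes. time_to_live is validated as >= 0; core_count
    # must be one of the supported sizes listed in the docstring.
    compute_properties = IntegrationRuntimeComputeProperties(
        location='East US',
        data_flow_properties=IntegrationRuntimeDataFlowProperties(
            compute_type=DataFlowComputeType.memory_optimized,
            core_count=8,
            time_to_live=10,
        ),
    )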
diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py index 8281c8a22a5d..c46f24701eb0 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_data_flow_debug_session_operations.py @@ -12,6 +12,8 @@ import uuid from msrest.pipeline import ClientRawResponse from msrestazure.azure_exceptions import CloudError +from msrest.polling import LROPoller, NoPolling +from msrestazure.polling.arm_polling import ARMPolling from .. import models @@ -39,29 +41,9 @@ def __init__(self, client, config, serializer, deserializer): self.config = config - def create( - self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config): - """Creates a data flow debug session. - :param resource_group_name: The resource group name. - :type resource_group_name: str - :param factory_name: The factory name. - :type factory_name: str - :param request: Data flow debug session definition - :type request: - ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest - :param dict custom_headers: headers that will be added to the request - :param bool raw: returns the direct response alongside the - deserialized response - :param operation_config: :ref:`Operation configuration - overrides`. - :return: CreateDataFlowDebugSessionResponse or ClientRawResponse if - raw=true - :rtype: - ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse or - ~msrest.pipeline.ClientRawResponse - :raises: :class:`CloudError` - """ + def _create_initial( + self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config): # Construct URL url = self.create.metadata['url'] path_format_arguments = { @@ -98,8 +80,9 @@ def create( exp.request_id = response.headers.get('x-ms-request-id') raise exp - header_dict = {} deserialized = None + header_dict = {} + if response.status_code == 200: deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', response) header_dict = { @@ -112,6 +95,61 @@ def create( return client_raw_response return deserialized + + def create( + self, resource_group_name, factory_name, request, custom_headers=None, raw=False, polling=True, **operation_config): + """Creates a data flow debug session. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+        :type factory_name: str
+        :param request: Data flow debug session definition
+        :type request:
+         ~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionRequest
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns
+         CreateDataFlowDebugSessionResponse or
+         ClientRawResponse if raw==True
+        :rtype:
+         ~msrest.polling.LROPoller[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse]
+         or
+         ~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.CreateDataFlowDebugSessionResponse]]
+        :raises: :class:`CloudError`
+        """
+        raw_result = self._create_initial(
+            resource_group_name=resource_group_name,
+            factory_name=factory_name,
+            request=request,
+            custom_headers=custom_headers,
+            raw=True,
+            **operation_config
+        )
+
+        def get_long_running_output(response):
+            header_dict = {
+                'location': 'str',
+            }
+            deserialized = self._deserialize('CreateDataFlowDebugSessionResponse', response)
+
+            if raw:
+                client_raw_response = ClientRawResponse(deserialized, response)
+                client_raw_response.add_headers(header_dict)
+                return client_raw_response
+
+            return deserialized
+
+        lro_delay = operation_config.get(
+            'long_running_operation_timeout',
+            self.config.long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/createDataFlowDebugSession'}

     def query_by_factory(
@@ -316,27 +354,9 @@ def delete(
             return client_raw_response
     delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/deleteDataFlowDebugSession'}

-    def execute_command(
-            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config):
-        """Execute a data flow debug command.
-
-        :param resource_group_name: The resource group name.
-        :type resource_group_name: str
-        :param factory_name: The factory name.
-        :type factory_name: str
-        :param request: Data flow debug command definition.
-        :type request:
-         ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest
-        :param dict custom_headers: headers that will be added to the request
-        :param bool raw: returns the direct response alongside the
-         deserialized response
-        :param operation_config: :ref:`Operation configuration
-         overrides`.
-        :return: DataFlowDebugCommandResponse or ClientRawResponse if raw=true
-        :rtype: ~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse or
-         ~msrest.pipeline.ClientRawResponse
-        :raises: :class:`CloudError`
-        """
+    def _execute_command_initial(
+            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, **operation_config):
         # Construct URL
         url = self.execute_command.metadata['url']
         path_format_arguments = {
@@ -373,8 +393,9 @@ def execute_command(
             exp.request_id = response.headers.get('x-ms-request-id')
             raise exp

-        header_dict = {}
         deserialized = None
+        header_dict = {}
+
         if response.status_code == 200:
             deserialized = self._deserialize('DataFlowDebugCommandResponse', response)
             header_dict = {
@@ -387,4 +408,59 @@ def execute_command(
             return client_raw_response

         return deserialized
+
+    def execute_command(
+            self, resource_group_name, factory_name, request, custom_headers=None, raw=False, polling=True, **operation_config):
+        """Execute a data flow debug command.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param request: Data flow debug command definition.
+        :type request:
+         ~azure.mgmt.datafactory.models.DataFlowDebugCommandRequest
+        :param dict custom_headers: headers that will be added to the request
+        :param bool raw: The poller return type is ClientRawResponse, the
+         direct response alongside the deserialized response
+        :param polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :return: An instance of LROPoller that returns
+         DataFlowDebugCommandResponse or
+         ClientRawResponse if raw==True
+        :rtype:
+         ~msrest.polling.LROPoller[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse]
+         or
+         ~msrest.polling.LROPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.datafactory.models.DataFlowDebugCommandResponse]]
+        :raises: :class:`CloudError`
+        """
+        raw_result = self._execute_command_initial(
+            resource_group_name=resource_group_name,
+            factory_name=factory_name,
+            request=request,
+            custom_headers=custom_headers,
+            raw=True,
+            **operation_config
+        )
+
+        def get_long_running_output(response):
+            header_dict = {
+                'location': 'str',
+            }
+            deserialized = self._deserialize('DataFlowDebugCommandResponse', response)
+
+            if raw:
+                client_raw_response = ClientRawResponse(deserialized, response)
+                client_raw_response.add_headers(header_dict)
+                return client_raw_response
+
+            return deserialized
+
+        lro_delay = operation_config.get(
+            'long_running_operation_timeout',
+            self.config.long_running_operation_timeout)
+        if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
     execute_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/executeDataFlowDebugCommand'}
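With this patch, create and execute_command start long-running operations and return a poller instead of the final response. A sketch of the resulting call pattern, assuming an already-configured DataFactoryManagementClient; the resource names are illustrative, and the time_to_live field is assumed from the CreateDataFlowDebugSessionRequest model rather than shown in this diff:

    from azure.mgmt.datafactory import DataFactoryManagementClient
    from azure.mgmt.datafactory.models import CreateDataFlowDebugSessionRequest

    # client = DataFactoryManagementClient(credentials, subscription_id)

    # create() now returns an LROPoller; result() blocks until the service
    # finishes and yields the deserialized CreateDataFlowDebugSessionResponse.
    poller = client.data_flow_debug_session.create(
        resource_group_name='example-rg',        # illustrative
        factory_name='example-factory',          # illustrative
        request=CreateDataFlowDebugSessionRequest(time_to_live=60),
    )
    session = poller.result()

    # Pass polling=False to skip ARM polling and get the initial response,
    # or pass a polling object for a personal polling strategy.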