From 58787603df370831341646efb0a2ca23ed72fae8 Mon Sep 17 00:00:00 2001 From: SDKAuto Date: Wed, 7 Apr 2021 09:19:16 +0000 Subject: [PATCH] CodeGen from PR 13669 in Azure/azure-rest-api-specs Merge 2f881fcb7113b48b154e32ad678ae427331dad45 into 9fbd9c69e95c30c0805b34d8b6e23a385dc0d6cc --- .../azure-mgmt-datafactory/MANIFEST.in | 1 + .../azure-mgmt-datafactory/_meta.json | 8 + .../_data_factory_management_client.py | 15 + .../azure/mgmt/datafactory/models/__init__.py | 58 + .../_data_factory_management_client_enums.py | 6 + .../azure/mgmt/datafactory/models/_models.py | 1092 ++++++++++++++-- .../mgmt/datafactory/models/_models_py3.py | 1162 ++++++++++++++--- .../mgmt/datafactory/models/_paged_models.py | 13 + .../mgmt/datafactory/operations/__init__.py | 6 + ...rivate_end_point_connections_operations.py | 113 ++ ..._private_endpoint_connection_operations.py | 252 ++++ .../_private_link_resources_operations.py | 101 ++ 12 files changed, 2536 insertions(+), 291 deletions(-) create mode 100644 sdk/datafactory/azure-mgmt-datafactory/_meta.json create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py create mode 100644 sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in index a3cb07df8765..3a9b6517412b 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in +++ b/sdk/datafactory/azure-mgmt-datafactory/MANIFEST.in @@ -1,3 +1,4 @@ +include _meta.json recursive-include tests *.py *.yaml include *.md include azure/__init__.py diff --git a/sdk/datafactory/azure-mgmt-datafactory/_meta.json b/sdk/datafactory/azure-mgmt-datafactory/_meta.json new file mode 100644 index 000000000000..048551735e59 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/_meta.json @@ -0,0 +1,8 @@ +{ + "autorest": "V2", + "use": "@microsoft.azure/autorest.python@~4.0.71", + "commit": "361651646870a3cfad36f9caee14db72c57c1460", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "autorest_command": "autorest specification/datafactory/resource-manager/readme.md --keep-version-file --multiapi --no-async --python --python-mode=update --python-sdks-folder=/home/vsts/work/1/s/azure-sdk-for-python/sdk --use=@microsoft.azure/autorest.python@~4.0.71 --version=V2", + "readme": "specification/datafactory/resource-manager/readme.md" +} \ No newline at end of file diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py index d80dbd4cbb10..26a14cdd2dd6 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/_data_factory_management_client.py @@ -30,6 +30,9 @@ from .operations import DataFlowDebugSessionOperations from .operations import ManagedVirtualNetworksOperations from .operations import ManagedPrivateEndpointsOperations +from .operations import PrivateEndPointConnectionsOperations +from .operations import PrivateEndpointConnectionOperations +from .operations import PrivateLinkResourcesOperations from . 
import models @@ -73,6 +76,12 @@ class DataFactoryManagementClient(SDKClient): :vartype managed_virtual_networks: azure.mgmt.datafactory.operations.ManagedVirtualNetworksOperations :ivar managed_private_endpoints: ManagedPrivateEndpoints operations :vartype managed_private_endpoints: azure.mgmt.datafactory.operations.ManagedPrivateEndpointsOperations + :ivar private_end_point_connections: PrivateEndPointConnections operations + :vartype private_end_point_connections: azure.mgmt.datafactory.operations.PrivateEndPointConnectionsOperations + :ivar private_endpoint_connection: PrivateEndpointConnection operations + :vartype private_endpoint_connection: azure.mgmt.datafactory.operations.PrivateEndpointConnectionOperations + :ivar private_link_resources: PrivateLinkResources operations + :vartype private_link_resources: azure.mgmt.datafactory.operations.PrivateLinkResourcesOperations :param credentials: Credentials needed for the client to connect to Azure. :type credentials: :mod:`A msrestazure Credentials @@ -127,3 +136,9 @@ def __init__( self._client, self.config, self._serialize, self._deserialize) self.managed_private_endpoints = ManagedPrivateEndpointsOperations( self._client, self.config, self._serialize, self._deserialize) + self.private_end_point_connections = PrivateEndPointConnectionsOperations( + self._client, self.config, self._serialize, self._deserialize) + self.private_endpoint_connection = PrivateEndpointConnectionOperations( + self._client, self.config, self._serialize, self._deserialize) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self.config, self._serialize, self._deserialize) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py index 0a5ec4b52c56..c7ad41126f02 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/__init__.py @@ -24,11 +24,15 @@ from ._models_py3 import AmazonRedshiftLinkedService from ._models_py3 import AmazonRedshiftSource from ._models_py3 import AmazonRedshiftTableDataset + from ._models_py3 import AmazonS3CompatibleLinkedService + from ._models_py3 import AmazonS3CompatibleLocation + from ._models_py3 import AmazonS3CompatibleReadSettings from ._models_py3 import AmazonS3Dataset from ._models_py3 import AmazonS3LinkedService from ._models_py3 import AmazonS3Location from ._models_py3 import AmazonS3ReadSettings from ._models_py3 import AppendVariableActivity + from ._models_py3 import ArmIdWrapper from ._models_py3 import AvroDataset from ._models_py3 import AvroFormat from ._models_py3 import AvroSink @@ -387,6 +391,7 @@ from ._models_py3 import MicrosoftAccessTableDataset from ._models_py3 import MongoDbAtlasCollectionDataset from ._models_py3 import MongoDbAtlasLinkedService + from ._models_py3 import MongoDbAtlasSink from ._models_py3 import MongoDbAtlasSource from ._models_py3 import MongoDbCollectionDataset from ._models_py3 import MongoDbCursorMethodsProperties @@ -394,6 +399,7 @@ from ._models_py3 import MongoDbSource from ._models_py3 import MongoDbV2CollectionDataset from ._models_py3 import MongoDbV2LinkedService + from ._models_py3 import MongoDbV2Sink from ._models_py3 import MongoDbV2Source from ._models_py3 import MultiplePipelineTrigger from ._models_py3 import MySqlLinkedService @@ -420,6 +426,9 @@ from ._models_py3 import OperationMetricDimension from ._models_py3 
import OperationMetricSpecification from ._models_py3 import OperationServiceSpecification + from ._models_py3 import OracleCloudStorageLinkedService + from ._models_py3 import OracleCloudStorageLocation + from ._models_py3 import OracleCloudStorageReadSettings from ._models_py3 import OracleLinkedService from ._models_py3 import OraclePartitionSettings from ._models_py3 import OracleServiceCloudLinkedService @@ -461,6 +470,13 @@ from ._models_py3 import PrestoLinkedService from ._models_py3 import PrestoObjectDataset from ._models_py3 import PrestoSource + from ._models_py3 import PrivateEndpointConnectionResource + from ._models_py3 import PrivateLinkConnectionApprovalRequest + from ._models_py3 import PrivateLinkConnectionApprovalRequestResource + from ._models_py3 import PrivateLinkConnectionState + from ._models_py3 import PrivateLinkResource + from ._models_py3 import PrivateLinkResourceProperties + from ._models_py3 import PrivateLinkResourcesWrapper from ._models_py3 import QuickBooksLinkedService from ._models_py3 import QuickBooksObjectDataset from ._models_py3 import QuickBooksSource @@ -470,6 +486,7 @@ from ._models_py3 import RedshiftUnloadSettings from ._models_py3 import RelationalSource from ._models_py3 import RelationalTableDataset + from ._models_py3 import RemotePrivateEndpointConnection from ._models_py3 import RerunTumblingWindowTrigger from ._models_py3 import Resource from ._models_py3 import ResponsysLinkedService @@ -548,6 +565,7 @@ from ._models_py3 import SparkLinkedService from ._models_py3 import SparkObjectDataset from ._models_py3 import SparkSource + from ._models_py3 import SqlAlwaysEncryptedProperties from ._models_py3 import SqlDWSink from ._models_py3 import SqlDWSource from ._models_py3 import SqlMISink @@ -660,11 +678,15 @@ from ._models import AmazonRedshiftLinkedService from ._models import AmazonRedshiftSource from ._models import AmazonRedshiftTableDataset + from ._models import AmazonS3CompatibleLinkedService + from ._models import AmazonS3CompatibleLocation + from ._models import AmazonS3CompatibleReadSettings from ._models import AmazonS3Dataset from ._models import AmazonS3LinkedService from ._models import AmazonS3Location from ._models import AmazonS3ReadSettings from ._models import AppendVariableActivity + from ._models import ArmIdWrapper from ._models import AvroDataset from ._models import AvroFormat from ._models import AvroSink @@ -1023,6 +1045,7 @@ from ._models import MicrosoftAccessTableDataset from ._models import MongoDbAtlasCollectionDataset from ._models import MongoDbAtlasLinkedService + from ._models import MongoDbAtlasSink from ._models import MongoDbAtlasSource from ._models import MongoDbCollectionDataset from ._models import MongoDbCursorMethodsProperties @@ -1030,6 +1053,7 @@ from ._models import MongoDbSource from ._models import MongoDbV2CollectionDataset from ._models import MongoDbV2LinkedService + from ._models import MongoDbV2Sink from ._models import MongoDbV2Source from ._models import MultiplePipelineTrigger from ._models import MySqlLinkedService @@ -1056,6 +1080,9 @@ from ._models import OperationMetricDimension from ._models import OperationMetricSpecification from ._models import OperationServiceSpecification + from ._models import OracleCloudStorageLinkedService + from ._models import OracleCloudStorageLocation + from ._models import OracleCloudStorageReadSettings from ._models import OracleLinkedService from ._models import OraclePartitionSettings from ._models import OracleServiceCloudLinkedService @@ 
-1097,6 +1124,13 @@ from ._models import PrestoLinkedService from ._models import PrestoObjectDataset from ._models import PrestoSource + from ._models import PrivateEndpointConnectionResource + from ._models import PrivateLinkConnectionApprovalRequest + from ._models import PrivateLinkConnectionApprovalRequestResource + from ._models import PrivateLinkConnectionState + from ._models import PrivateLinkResource + from ._models import PrivateLinkResourceProperties + from ._models import PrivateLinkResourcesWrapper from ._models import QuickBooksLinkedService from ._models import QuickBooksObjectDataset from ._models import QuickBooksSource @@ -1106,6 +1140,7 @@ from ._models import RedshiftUnloadSettings from ._models import RelationalSource from ._models import RelationalTableDataset + from ._models import RemotePrivateEndpointConnection from ._models import RerunTumblingWindowTrigger from ._models import Resource from ._models import ResponsysLinkedService @@ -1184,6 +1219,7 @@ from ._models import SparkLinkedService from ._models import SparkObjectDataset from ._models import SparkSource + from ._models import SqlAlwaysEncryptedProperties from ._models import SqlDWSink from ._models import SqlDWSource from ._models import SqlMISink @@ -1291,6 +1327,7 @@ from ._paged_models import ManagedVirtualNetworkResourcePaged from ._paged_models import OperationPaged from ._paged_models import PipelineResourcePaged +from ._paged_models import PrivateEndpointConnectionResourcePaged from ._paged_models import TriggerResourcePaged from ._data_factory_management_client_enums import ( FactoryIdentityType, @@ -1337,6 +1374,7 @@ DynamicsAuthenticationType, CosmosDbServicePrincipalCredentialType, CosmosDbConnectionMode, + SqlAlwaysEncryptedAkvAuthType, OrcCompressionCodec, AvroCompressionCodec, TumblingWindowFrequency, @@ -1394,11 +1432,15 @@ 'AmazonRedshiftLinkedService', 'AmazonRedshiftSource', 'AmazonRedshiftTableDataset', + 'AmazonS3CompatibleLinkedService', + 'AmazonS3CompatibleLocation', + 'AmazonS3CompatibleReadSettings', 'AmazonS3Dataset', 'AmazonS3LinkedService', 'AmazonS3Location', 'AmazonS3ReadSettings', 'AppendVariableActivity', + 'ArmIdWrapper', 'AvroDataset', 'AvroFormat', 'AvroSink', @@ -1757,6 +1799,7 @@ 'MicrosoftAccessTableDataset', 'MongoDbAtlasCollectionDataset', 'MongoDbAtlasLinkedService', + 'MongoDbAtlasSink', 'MongoDbAtlasSource', 'MongoDbCollectionDataset', 'MongoDbCursorMethodsProperties', @@ -1764,6 +1807,7 @@ 'MongoDbSource', 'MongoDbV2CollectionDataset', 'MongoDbV2LinkedService', + 'MongoDbV2Sink', 'MongoDbV2Source', 'MultiplePipelineTrigger', 'MySqlLinkedService', @@ -1790,6 +1834,9 @@ 'OperationMetricDimension', 'OperationMetricSpecification', 'OperationServiceSpecification', + 'OracleCloudStorageLinkedService', + 'OracleCloudStorageLocation', + 'OracleCloudStorageReadSettings', 'OracleLinkedService', 'OraclePartitionSettings', 'OracleServiceCloudLinkedService', @@ -1831,6 +1878,13 @@ 'PrestoLinkedService', 'PrestoObjectDataset', 'PrestoSource', + 'PrivateEndpointConnectionResource', + 'PrivateLinkConnectionApprovalRequest', + 'PrivateLinkConnectionApprovalRequestResource', + 'PrivateLinkConnectionState', + 'PrivateLinkResource', + 'PrivateLinkResourceProperties', + 'PrivateLinkResourcesWrapper', 'QuickBooksLinkedService', 'QuickBooksObjectDataset', 'QuickBooksSource', @@ -1840,6 +1894,7 @@ 'RedshiftUnloadSettings', 'RelationalSource', 'RelationalTableDataset', + 'RemotePrivateEndpointConnection', 'RerunTumblingWindowTrigger', 'Resource', 'ResponsysLinkedService', @@ -1918,6 
+1973,7 @@ 'SparkLinkedService', 'SparkObjectDataset', 'SparkSource', + 'SqlAlwaysEncryptedProperties', 'SqlDWSink', 'SqlDWSource', 'SqlMISink', @@ -2026,6 +2082,7 @@ 'DataFlowDebugSessionInfoPaged', 'ManagedVirtualNetworkResourcePaged', 'ManagedPrivateEndpointResourcePaged', + 'PrivateEndpointConnectionResourcePaged', 'FactoryIdentityType', 'GlobalParameterType', 'PublicNetworkAccess', @@ -2070,6 +2127,7 @@ 'DynamicsAuthenticationType', 'CosmosDbServicePrincipalCredentialType', 'CosmosDbConnectionMode', + 'SqlAlwaysEncryptedAkvAuthType', 'OrcCompressionCodec', 'AvroCompressionCodec', 'TumblingWindowFrequency', diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py index 84de50ffbe43..f8bab6f59a6e 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_data_factory_management_client_enums.py @@ -341,6 +341,12 @@ class CosmosDbConnectionMode(str, Enum): direct = "Direct" +class SqlAlwaysEncryptedAkvAuthType(str, Enum): + + service_principal = "ServicePrincipal" + managed_identity = "ManagedIdentity" + + class OrcCompressionCodec(str, Enum): none = "none" diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py index 9a5004466192..a0be7d6baba8 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models.py @@ -361,6 +361,7 @@ class LinkedService(Model): TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, GoogleCloudStorageLinkedService, + OracleCloudStorageLinkedService, AmazonS3CompatibleLinkedService, AzureFileStorageLinkedService, FileServerLinkedService, HDInsightLinkedService, CommonDataServiceForAppsLinkedService, DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, @@ -404,7 +405,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 
'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 
'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 
'AzureStorageLinkedService'} } def __init__(self, **kwargs): @@ -1073,6 +1074,300 @@ def __init__(self, **kwargs): self.type = 'AmazonRedshiftTable' +class AmazonS3CompatibleLinkedService(LinkedService): + """Linked service for Amazon S3 Compatible. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 + Compatible Identity and Access Management (IAM) user. Type: string (or + Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 + Compatible Identity and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + Amazon S3 Compatible Connector. This is an optional property; change it + only if you want to try a different service endpoint or want to switch + between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param force_path_style: If true, use S3 path-style access instead of + virtual hosted-style access. Default value is false. Type: boolean (or + Expression with resultType boolean). + :type force_path_style: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'force_path_style': {'key': 'typeProperties.forcePathStyle', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3CompatibleLinkedService, self).__init__(**kwargs) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.force_path_style = kwargs.get('force_path_style', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'AmazonS3Compatible' + + +class DatasetLocation(Model): + """Dataset location. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: HdfsLocation, HttpServerLocation, SftpLocation, + FtpServerLocation, GoogleCloudStorageLocation, OracleCloudStorageLocation, + AmazonS3CompatibleLocation, AzureFileStorageLocation, FileServerLocation, + AmazonS3Location, AzureDataLakeStoreLocation, AzureBlobFSLocation, + AzureBlobStorageLocation + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param type: Required. Constant filled by server. 
+ :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'OracleCloudStorageLocation': 'OracleCloudStorageLocation', 'AmazonS3CompatibleLocation': 'AmazonS3CompatibleLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation'} + } + + def __init__(self, **kwargs): + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.folder_path = kwargs.get('folder_path', None) + self.file_name = kwargs.get('file_name', None) + self.type = None + + +class AmazonS3CompatibleLocation(DatasetLocation): + """The location of Amazon S3 Compatible dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: + string (or Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of Amazon S3 Compatible. Type: string + (or Expression with resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3CompatibleLocation, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) + self.type = 'AmazonS3CompatibleLocation' + + +class StoreReadSettings(Model): + """Connector read setting. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: HdfsReadSettings, HttpReadSettings, SftpReadSettings, + FtpReadSettings, GoogleCloudStorageReadSettings, + OracleCloudStorageReadSettings, AmazonS3CompatibleReadSettings, + AzureFileStorageReadSettings, FileServerReadSettings, AmazonS3ReadSettings, + AzureDataLakeStoreReadSettings, AzureBlobFSReadSettings, + AzureBlobStorageReadSettings + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'OracleCloudStorageReadSettings': 'OracleCloudStorageReadSettings', 'AmazonS3CompatibleReadSettings': 'AmazonS3CompatibleReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings'} + } + + def __init__(self, **kwargs): + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) + self.type = None + + +class AmazonS3CompatibleReadSettings(StoreReadSettings): + """Amazon S3 Compatible read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. + Type: string (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: + string (or Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 Compatible object name. Type: + string (or Expression with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). 
+ :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files + need to be deleted after copy completion. Default is false. Type: boolean + (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AmazonS3CompatibleReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) + self.modified_datetime_start = kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.type = 'AmazonS3CompatibleReadSettings' + + class AmazonS3Dataset(Dataset): """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. @@ -1244,53 +1539,6 @@ def __init__(self, **kwargs): self.type = 'AmazonS3' -class DatasetLocation(Model): - """Dataset location. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: HdfsLocation, HttpServerLocation, SftpLocation, - FtpServerLocation, GoogleCloudStorageLocation, AzureFileStorageLocation, - FileServerLocation, AmazonS3Location, AzureDataLakeStoreLocation, - AzureBlobFSLocation, AzureBlobStorageLocation - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param type: Required. 
Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation'} - } - - def __init__(self, **kwargs): - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.folder_path = kwargs.get('folder_path', None) - self.file_name = kwargs.get('file_name', None) - self.type = None - - class AmazonS3Location(DatasetLocation): """The location of amazon S3 dataset. @@ -1335,52 +1583,8 @@ def __init__(self, **kwargs): self.type = 'AmazonS3Location' -class StoreReadSettings(Model): - """Connector read setting. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: HdfsReadSettings, HttpReadSettings, SftpReadSettings, - FtpReadSettings, GoogleCloudStorageReadSettings, - AzureFileStorageReadSettings, FileServerReadSettings, AmazonS3ReadSettings, - AzureDataLakeStoreReadSettings, AzureBlobFSReadSettings, - AzureBlobStorageReadSettings - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. - :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings'} - } - - def __init__(self, **kwargs): - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.max_concurrent_connections = kwargs.get('max_concurrent_connections', None) - self.type = None - - class AmazonS3ReadSettings(StoreReadSettings): - """Azure data lake store read settings. + """Amazon S3 read settings. 
All required parameters must be populated in order to send to Azure. @@ -1539,26 +1743,49 @@ class AppendVariableActivity(ControlActivity): """ _validation = { - 'name': {'required': True}, - 'type': {'required': True}, + 'name': {'required': True}, + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, + 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'value': {'key': 'typeProperties.value', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(AppendVariableActivity, self).__init__(**kwargs) + self.variable_name = kwargs.get('variable_name', None) + self.value = kwargs.get('value', None) + self.type = 'AppendVariable' + + +class ArmIdWrapper(Model): + """A wrapper for an ARM resource id. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, } _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'depends_on': {'key': 'dependsOn', 'type': '[ActivityDependency]'}, - 'user_properties': {'key': 'userProperties', 'type': '[UserProperty]'}, - 'type': {'key': 'type', 'type': 'str'}, - 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, - 'value': {'key': 'typeProperties.value', 'type': 'object'}, + 'id': {'key': 'id', 'type': 'str'}, } def __init__(self, **kwargs): - super(AppendVariableActivity, self).__init__(**kwargs) - self.variable_name = kwargs.get('variable_name', None) - self.value = kwargs.get('value', None) - self.type = 'AppendVariable' + super(ArmIdWrapper, self).__init__(**kwargs) + self.id = None class AvroDataset(Dataset): @@ -1715,11 +1942,12 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, - SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, - DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, - AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + sub-classes are: CosmosDbMongoDbApiSink, MongoDbV2Sink, MongoDbAtlasSink, + SalesforceServiceCloudSink, SalesforceSink, AzureDataExplorerSink, + CommonDataServiceForAppsSink, DynamicsCrmSink, DynamicsSink, + MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SnowflakeSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, @@ -1767,7 +1995,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 
'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, **kwargs): @@ -2716,6 +2944,11 @@ class AzureBlobStorageLinkedService(LinkedService): AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :type azure_cloud_type: object + :param account_kind: Specify the kind of your storage account. Allowed + values are: Storage (general purpose v1), StorageV2 (general purpose v2), + BlobStorage, or BlockBlobStorage. Type: string (or Expression with + resultType string). + :type account_kind: str :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2742,6 +2975,7 @@ class AzureBlobStorageLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, + 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } @@ -2756,6 +2990,7 @@ def __init__(self, **kwargs): self.service_principal_key = kwargs.get('service_principal_key', None) self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) + self.account_kind = kwargs.get('account_kind', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) self.type = 'AzureBlobStorage' @@ -3611,28 +3846,25 @@ class AzureDataExplorerLinkedService(LinkedService): https://..kusto.windows.net. Type: string (or Expression with resultType string) :type endpoint: object - :param service_principal_id: Required. The ID of the service principal - used to authenticate against Azure Data Explorer. Type: string (or - Expression with resultType string). + :param service_principal_id: The ID of the service principal used to + authenticate against Azure Data Explorer. Type: string (or Expression with + resultType string). :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal - used to authenticate against Kusto. + :param service_principal_key: The key of the service principal used to + authenticate against Kusto. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). 
:type tenant: object """ _validation = { 'type': {'required': True}, 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { @@ -6218,6 +6450,9 @@ class AzureSqlDatabaseLinkedService(LinkedService): authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -6239,6 +6474,7 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__(self, **kwargs): @@ -6250,6 +6486,7 @@ def __init__(self, **kwargs): self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) self.type = 'AzureSqlDatabase' @@ -6453,6 +6690,9 @@ class AzureSqlMILinkedService(LinkedService): authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -6474,6 +6714,7 @@ class AzureSqlMILinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__(self, **kwargs): @@ -6485,6 +6726,7 @@ def __init__(self, **kwargs): self.tenant = kwargs.get('tenant', None) self.azure_cloud_type = kwargs.get('azure_cloud_type', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) self.type = 'AzureSqlMI' @@ -21737,6 +21979,62 @@ def __init__(self, **kwargs): self.type = 'MongoDbAtlas' +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. 
Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether a document with the same key + should be overwritten (upsert) rather than raise an exception (insert). + The default value is "insert". Type: string (or Expression with + resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbAtlasSink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'MongoDbAtlasSink' + + class MongoDbAtlasSource(CopySource): """A copy activity source for a MongoDB Atlas database. @@ -22175,6 +22473,62 @@ def __init__(self, **kwargs): self.type = 'MongoDbV2' + +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether a document with the same key + should be overwritten (upsert) rather than raise an exception (insert). + The default value is "insert". Type: string (or Expression with + resultType string).
+ :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(MongoDbV2Sink, self).__init__(**kwargs) + self.write_behavior = kwargs.get('write_behavior', None) + self.type = 'MongoDbV2Sink' + + class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database. @@ -23545,14 +23899,209 @@ class OperationServiceSpecification(Model): """ _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + } + + def __init__(self, **kwargs): + super(OperationServiceSpecification, self).__init__(**kwargs) + self.log_specifications = kwargs.get('log_specifications', None) + self.metric_specifications = kwargs.get('metric_specifications', None) + + +class OracleCloudStorageLinkedService(LinkedService): + """Linked service for Oracle Cloud Storage. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Oracle Cloud + Storage Identity and Access Management (IAM) user. Type: string (or + Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Oracle Cloud + Storage Identity and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + Oracle Cloud Storage Connector. This is an optional property; change it + only if you want to try a different service endpoint or want to switch + between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). 
+ :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleCloudStorageLinkedService, self).__init__(**kwargs) + self.access_key_id = kwargs.get('access_key_id', None) + self.secret_access_key = kwargs.get('secret_access_key', None) + self.service_url = kwargs.get('service_url', None) + self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.type = 'OracleCloudStorage' + + +class OracleCloudStorageLocation(DatasetLocation): + """The location of Oracle Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: + string (or Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of Oracle Cloud Storage. Type: string + (or Expression with resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, **kwargs): + super(OracleCloudStorageLocation, self).__init__(**kwargs) + self.bucket_name = kwargs.get('bucket_name', None) + self.version = kwargs.get('version', None) + self.type = 'OracleCloudStorageLocation' + + +class OracleCloudStorageReadSettings(StoreReadSettings): + """Oracle Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. 
Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. + Type: string (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: + string (or Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Oracle Cloud Storage object name. + Type: string (or Expression with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files + need to be deleted after copy completion. Default is false. Type: boolean + (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } def __init__(self, **kwargs): - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = kwargs.get('log_specifications', None) - self.metric_specifications = kwargs.get('metric_specifications', None) + super(OracleCloudStorageReadSettings, self).__init__(**kwargs) + self.recursive = kwargs.get('recursive', None) + self.wildcard_folder_path = kwargs.get('wildcard_folder_path', None) + self.wildcard_file_name = kwargs.get('wildcard_file_name', None) + self.prefix = kwargs.get('prefix', None) + self.file_list_path = kwargs.get('file_list_path', None) + self.enable_partition_discovery = kwargs.get('enable_partition_discovery', None) + self.partition_root_path = kwargs.get('partition_root_path', None) + self.delete_files_after_completion = kwargs.get('delete_files_after_completion', None) + self.modified_datetime_start = 
kwargs.get('modified_datetime_start', None) + self.modified_datetime_end = kwargs.get('modified_datetime_end', None) + self.type = 'OracleCloudStorageReadSettings' class OracleLinkedService(LinkedService): @@ -25810,6 +26359,219 @@ def __init__(self, **kwargs): self.type = 'PrestoSource' +class PrivateEndpointConnectionResource(SubResource): + """Private Endpoint Connection ARM resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties + :type properties: + ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RemotePrivateEndpointConnection'}, + } + + def __init__(self, **kwargs): + super(PrivateEndpointConnectionResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class PrivateLinkConnectionApprovalRequest(Model): + """A request to approve or reject a private endpoint connection. + + :param private_link_service_connection_state: + :type private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ + + _attribute_map = { + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + + +class PrivateLinkConnectionApprovalRequestResource(SubResource): + """Private Endpoint Connection Approval ARM resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties + :type properties: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkConnectionApprovalRequest'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class PrivateLinkConnectionState(Model): + """The state of a private link connection. 
+ + :param status: Status of a private link connection + :type status: str + :param description: Description of a private link connection + :type description: str + :param actions_required: ActionsRequired for a private link connection + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = kwargs.get('status', None) + self.description = kwargs.get('description', None) + self.actions_required = kwargs.get('actions_required', None) + + +class PrivateLinkResource(SubResource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties + :type properties: + ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkResource, self).__init__(**kwargs) + self.properties = kwargs.get('properties', None) + + +class PrivateLinkResourceProperties(Model): + """Properties of a private link resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar group_id: GroupId of a private link resource + :vartype group_id: str + :ivar required_members: RequiredMembers of a private link resource + :vartype required_members: list[str] + :ivar required_zone_names: RequiredZoneNames of a private link resource + :vartype required_zone_names: list[str] + """ + + _validation = { + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + 'required_zone_names': {'readonly': True}, + } + + _attribute_map = { + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkResourceProperties, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = None + + +class PrivateLinkResourcesWrapper(Model): + """Wrapper for a collection of private link resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. + :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__(self, **kwargs): + super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + + class QuickBooksLinkedService(LinkedService): """QuickBooks server linked service. 
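Editor's note: taken together, the private link models introduced above are what a caller composes to approve or reject a factory's private endpoint connection. A minimal sketch, assuming these classes are exported from the models namespace as the rest of this patch suggests; the status value and description are placeholders, not code from this diff:

from azure.mgmt.datafactory.models import (
    PrivateLinkConnectionApprovalRequest,
    PrivateLinkConnectionApprovalRequestResource,
    PrivateLinkConnectionState,
)

# Build the approval payload; PrivateLinkConnectionState carries the
# Approved/Rejected decision and a human-readable reason.
approval = PrivateLinkConnectionApprovalRequestResource(
    properties=PrivateLinkConnectionApprovalRequest(
        private_link_service_connection_state=PrivateLinkConnectionState(
            status='Approved',  # or 'Rejected'
            description='Approved by the data platform team',
            actions_required='',
        )
    )
)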
@@ -26257,6 +27019,39 @@ def __init__(self, **kwargs): self.type = 'RelationalTable' +class RemotePrivateEndpointConnection(Model): + """A remote private endpoint connection. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar provisioning_state: + :vartype provisioning_state: str + :param private_endpoint: PrivateEndpoint of a remote private endpoint + connection + :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper + :param private_link_service_connection_state: + :type private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'private_endpoint': {'key': 'privateEndpoint', 'type': 'ArmIdWrapper'}, + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__(self, **kwargs): + super(RemotePrivateEndpointConnection, self).__init__(**kwargs) + self.provisioning_state = None + self.private_endpoint = kwargs.get('private_endpoint', None) + self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) + + class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -31167,6 +31962,42 @@ def __init__(self, **kwargs): self.type = 'SparkSource' +class SqlAlwaysEncryptedProperties(Model): + """Sql always encrypted properties. + + All required parameters must be populated in order to send to Azure. + + :param always_encrypted_akv_auth_type: Required. Sql always encrypted AKV + authentication type. Type: string (or Expression with resultType string). + Possible values include: 'ServicePrincipal', 'ManagedIdentity' + :type always_encrypted_akv_auth_type: str or + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType + :param service_principal_id: The client ID of the application in Azure + Active Directory used for Azure Key Vault authentication. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure Key Vault. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'always_encrypted_akv_auth_type': {'required': True}, + } + + _attribute_map = { + 'always_encrypted_akv_auth_type': {'key': 'alwaysEncryptedAkvAuthType', 'type': 'str'}, + 'service_principal_id': {'key': 'servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'servicePrincipalKey', 'type': 'SecretBase'}, + } + + def __init__(self, **kwargs): + super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) + self.always_encrypted_akv_auth_type = kwargs.get('always_encrypted_akv_auth_type', None) + self.service_principal_id = kwargs.get('service_principal_id', None) + self.service_principal_key = kwargs.get('service_principal_key', None) + + class SqlDWSink(CopySink): """A copy activity SQL Data Warehouse sink. @@ -31563,6 +32394,9 @@ class SqlServerLinkedService(LinkedService): authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. 
+ :type always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -31581,6 +32415,7 @@ class SqlServerLinkedService(LinkedService): 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } def __init__(self, **kwargs): @@ -31589,6 +32424,7 @@ def __init__(self, **kwargs): self.user_name = kwargs.get('user_name', None) self.password = kwargs.get('password', None) self.encrypted_credential = kwargs.get('encrypted_credential', None) + self.always_encrypted_settings = kwargs.get('always_encrypted_settings', None) self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py index 7a57a162ab8f..83e467407e37 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_models_py3.py @@ -361,6 +361,7 @@ class LinkedService(Model): TeradataLinkedService, Db2LinkedService, SybaseLinkedService, PostgreSqlLinkedService, MySqlLinkedService, AzureMySqlLinkedService, OracleLinkedService, GoogleCloudStorageLinkedService, + OracleCloudStorageLinkedService, AmazonS3CompatibleLinkedService, AzureFileStorageLinkedService, FileServerLinkedService, HDInsightLinkedService, CommonDataServiceForAppsLinkedService, DynamicsCrmLinkedService, DynamicsLinkedService, CosmosDbLinkedService, @@ -404,7 +405,7 @@ class LinkedService(Model): } _subtype_map = { - 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 
'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} + 'type': {'SharePointOnlineList': 'SharePointOnlineListLinkedService', 'Snowflake': 'SnowflakeLinkedService', 'AzureFunction': 'AzureFunctionLinkedService', 'AzureDataExplorer': 'AzureDataExplorerLinkedService', 'SapTable': 'SapTableLinkedService', 'GoogleAdWords': 'GoogleAdWordsLinkedService', 'OracleServiceCloud': 'OracleServiceCloudLinkedService', 'DynamicsAX': 'DynamicsAXLinkedService', 'Responsys': 'ResponsysLinkedService', 'AzureDatabricksDeltaLake': 'AzureDatabricksDeltaLakeLinkedService', 'AzureDatabricks': 'AzureDatabricksLinkedService', 'AzureDataLakeAnalytics': 'AzureDataLakeAnalyticsLinkedService', 'HDInsightOnDemand': 'HDInsightOnDemandLinkedService', 'SalesforceMarketingCloud': 'SalesforceMarketingCloudLinkedService', 'Netezza': 'NetezzaLinkedService', 'Vertica': 'VerticaLinkedService', 'Zoho': 'ZohoLinkedService', 'Xero': 'XeroLinkedService', 'Square': 'SquareLinkedService', 'Spark': 'SparkLinkedService', 'Shopify': 'ShopifyLinkedService', 'ServiceNow': 'ServiceNowLinkedService', 'QuickBooks': 'QuickBooksLinkedService', 'Presto': 'PrestoLinkedService', 'Phoenix': 'PhoenixLinkedService', 'Paypal': 'PaypalLinkedService', 'Marketo': 
'MarketoLinkedService', 'AzureMariaDB': 'AzureMariaDBLinkedService', 'MariaDB': 'MariaDBLinkedService', 'Magento': 'MagentoLinkedService', 'Jira': 'JiraLinkedService', 'Impala': 'ImpalaLinkedService', 'Hubspot': 'HubspotLinkedService', 'Hive': 'HiveLinkedService', 'HBase': 'HBaseLinkedService', 'Greenplum': 'GreenplumLinkedService', 'GoogleBigQuery': 'GoogleBigQueryLinkedService', 'Eloqua': 'EloquaLinkedService', 'Drill': 'DrillLinkedService', 'Couchbase': 'CouchbaseLinkedService', 'Concur': 'ConcurLinkedService', 'AzurePostgreSql': 'AzurePostgreSqlLinkedService', 'AmazonMWS': 'AmazonMWSLinkedService', 'SapHana': 'SapHanaLinkedService', 'SapBW': 'SapBWLinkedService', 'Sftp': 'SftpServerLinkedService', 'FtpServer': 'FtpServerLinkedService', 'HttpServer': 'HttpLinkedService', 'AzureSearch': 'AzureSearchLinkedService', 'CustomDataSource': 'CustomDataSourceLinkedService', 'AmazonRedshift': 'AmazonRedshiftLinkedService', 'AmazonS3': 'AmazonS3LinkedService', 'RestService': 'RestServiceLinkedService', 'SapOpenHub': 'SapOpenHubLinkedService', 'SapEcc': 'SapEccLinkedService', 'SapCloudForCustomer': 'SapCloudForCustomerLinkedService', 'SalesforceServiceCloud': 'SalesforceServiceCloudLinkedService', 'Salesforce': 'SalesforceLinkedService', 'Office365': 'Office365LinkedService', 'AzureBlobFS': 'AzureBlobFSLinkedService', 'AzureDataLakeStore': 'AzureDataLakeStoreLinkedService', 'CosmosDbMongoDbApi': 'CosmosDbMongoDbApiLinkedService', 'MongoDbV2': 'MongoDbV2LinkedService', 'MongoDbAtlas': 'MongoDbAtlasLinkedService', 'MongoDb': 'MongoDbLinkedService', 'Cassandra': 'CassandraLinkedService', 'Web': 'WebLinkedService', 'OData': 'ODataLinkedService', 'Hdfs': 'HdfsLinkedService', 'MicrosoftAccess': 'MicrosoftAccessLinkedService', 'Informix': 'InformixLinkedService', 'Odbc': 'OdbcLinkedService', 'AzureMLService': 'AzureMLServiceLinkedService', 'AzureML': 'AzureMLLinkedService', 'Teradata': 'TeradataLinkedService', 'Db2': 'Db2LinkedService', 'Sybase': 'SybaseLinkedService', 'PostgreSql': 'PostgreSqlLinkedService', 'MySql': 'MySqlLinkedService', 'AzureMySql': 'AzureMySqlLinkedService', 'Oracle': 'OracleLinkedService', 'GoogleCloudStorage': 'GoogleCloudStorageLinkedService', 'OracleCloudStorage': 'OracleCloudStorageLinkedService', 'AmazonS3Compatible': 'AmazonS3CompatibleLinkedService', 'AzureFileStorage': 'AzureFileStorageLinkedService', 'FileServer': 'FileServerLinkedService', 'HDInsight': 'HDInsightLinkedService', 'CommonDataServiceForApps': 'CommonDataServiceForAppsLinkedService', 'DynamicsCrm': 'DynamicsCrmLinkedService', 'Dynamics': 'DynamicsLinkedService', 'CosmosDb': 'CosmosDbLinkedService', 'AzureKeyVault': 'AzureKeyVaultLinkedService', 'AzureBatch': 'AzureBatchLinkedService', 'AzureSqlMI': 'AzureSqlMILinkedService', 'AzureSqlDatabase': 'AzureSqlDatabaseLinkedService', 'SqlServer': 'SqlServerLinkedService', 'AzureSqlDW': 'AzureSqlDWLinkedService', 'AzureTableStorage': 'AzureTableStorageLinkedService', 'AzureBlobStorage': 'AzureBlobStorageLinkedService', 'AzureStorage': 'AzureStorageLinkedService'} } def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: @@ -1073,6 +1074,300 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'AmazonRedshiftTable' +class AmazonS3CompatibleLinkedService(LinkedService): + """Linked service for Amazon S3 Compatible. + + All required parameters must be populated in order to send to Azure. 
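Editor's note: a hedged sketch of how the AmazonS3CompatibleLinkedService introduced here might be instantiated through the keyword-only constructor defined below. The endpoint and credential values are placeholders, and SecureString is the SecretBase subclass used elsewhere in this SDK:

from azure.mgmt.datafactory.models import (
    AmazonS3CompatibleLinkedService,
    SecureString,
)

s3_compatible = AmazonS3CompatibleLinkedService(
    access_key_id='<access-key-id>',
    secret_access_key=SecureString(value='<secret-access-key>'),
    service_url='https://s3.example.com',  # custom S3-compatible endpoint
    force_path_style=True,                 # path-style instead of virtual-hosted access
)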
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Amazon S3 + Compatible Identity and Access Management (IAM) user. Type: string (or + Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Amazon S3 + Compatible Identity and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + Amazon S3 Compatible Connector. This is an optional property; change it + only if you want to try a different service endpoint or want to switch + between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param force_path_style: If true, use S3 path-style access instead of + virtual hosted-style access. Default value is false. Type: boolean (or + Expression with resultType boolean). + :type force_path_style: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'force_path_style': {'key': 'typeProperties.forcePathStyle', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, force_path_style=None, encrypted_credential=None, **kwargs) -> None: + super(AmazonS3CompatibleLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.force_path_style = force_path_style + self.encrypted_credential = encrypted_credential + self.type = 'AmazonS3Compatible' + + +class DatasetLocation(Model): + """Dataset location. 
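Editor's note: the _subtype_map re-emitted below is how msrest routes the `type` discriminator to a concrete location class during deserialization; each subclass pins `self.type` on construction. A small illustration with placeholder bucket and path values:

from azure.mgmt.datafactory.models import AmazonS3CompatibleLocation

location = AmazonS3CompatibleLocation(
    bucket_name='my-bucket',
    folder_path='raw/events',
    file_name='part-0000.json',
)
# The discriminator is set by the subclass constructor, not by the caller.
assert location.type == 'AmazonS3CompatibleLocation'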
+ + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: HdfsLocation, HttpServerLocation, SftpLocation, + FtpServerLocation, GoogleCloudStorageLocation, OracleCloudStorageLocation, + AmazonS3CompatibleLocation, AzureFileStorageLocation, FileServerLocation, + AmazonS3Location, AzureDataLakeStoreLocation, AzureBlobFSLocation, + AzureBlobStorageLocation + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'OracleCloudStorageLocation': 'OracleCloudStorageLocation', 'AmazonS3CompatibleLocation': 'AmazonS3CompatibleLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation'} + } + + def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: + super(DatasetLocation, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.folder_path = folder_path + self.file_name = file_name + self.type = None + + +class AmazonS3CompatibleLocation(DatasetLocation): + """The location of Amazon S3 Compatible dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param folder_path: Specify the folder path of dataset. Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Specify the bucketName of Amazon S3 Compatible. Type: + string (or Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of Amazon S3 Compatible. Type: string + (or Expression with resultType string). 
+ :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: + super(AmazonS3CompatibleLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.bucket_name = bucket_name + self.version = version + self.type = 'AmazonS3CompatibleLocation' + + +class StoreReadSettings(Model): + """Connector read setting. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: HdfsReadSettings, HttpReadSettings, SftpReadSettings, + FtpReadSettings, GoogleCloudStorageReadSettings, + OracleCloudStorageReadSettings, AmazonS3CompatibleReadSettings, + AzureFileStorageReadSettings, FileServerReadSettings, AmazonS3ReadSettings, + AzureDataLakeStoreReadSettings, AzureBlobFSReadSettings, + AzureBlobStorageReadSettings + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'OracleCloudStorageReadSettings': 'OracleCloudStorageReadSettings', 'AmazonS3CompatibleReadSettings': 'AmazonS3CompatibleReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings'} + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: + super(StoreReadSettings, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.max_concurrent_connections = max_concurrent_connections + self.type = None + + +class AmazonS3CompatibleReadSettings(StoreReadSettings): + """Amazon S3 Compatible read settings. + + All required parameters must be populated in order to send to Azure. 
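Editor's note: for the read settings documented below, a usage sketch under stated assumptions (all values are placeholders; typically only one of wildcard, prefix, or file_list_path is used to select objects):

from azure.mgmt.datafactory.models import AmazonS3CompatibleReadSettings

read_settings = AmazonS3CompatibleReadSettings(
    recursive=True,                              # walk folders under the path
    wildcard_file_name='*.csv',                  # filter objects by name
    enable_partition_discovery=True,
    partition_root_path='raw/events',            # where discovery starts
    modified_datetime_start='2021-04-01T00:00:00Z',
    modified_datetime_end='2021-04-07T00:00:00Z',
)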
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Amazon S3 Compatible wildcardFolderPath. + Type: string (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Amazon S3 Compatible wildcardFileName. Type: + string (or Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the S3 Compatible object name. Type: + string (or Expression with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files + need to be deleted after copy completion. Default is false. Type: boolean + (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. + Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). 
+ :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(AmazonS3CompatibleReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name + self.prefix = prefix + self.file_list_path = file_list_path + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.type = 'AmazonS3CompatibleReadSettings' + + class AmazonS3Dataset(Dataset): """A single Amazon Simple Storage Service (S3) object or a set of S3 objects. @@ -1244,53 +1539,6 @@ def __init__(self, *, additional_properties=None, connect_via=None, description: self.type = 'AmazonS3' -class DatasetLocation(Model): - """Dataset location. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: HdfsLocation, HttpServerLocation, SftpLocation, - FtpServerLocation, GoogleCloudStorageLocation, AzureFileStorageLocation, - FileServerLocation, AmazonS3Location, AzureDataLakeStoreLocation, - AzureBlobFSLocation, AzureBlobStorageLocation - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param folder_path: Specify the folder path of dataset. Type: string (or - Expression with resultType string) - :type folder_path: object - :param file_name: Specify the file name of dataset. Type: string (or - Expression with resultType string). - :type file_name: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'folder_path': {'key': 'folderPath', 'type': 'object'}, - 'file_name': {'key': 'fileName', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'HdfsLocation': 'HdfsLocation', 'HttpServerLocation': 'HttpServerLocation', 'SftpLocation': 'SftpLocation', 'FtpServerLocation': 'FtpServerLocation', 'GoogleCloudStorageLocation': 'GoogleCloudStorageLocation', 'AzureFileStorageLocation': 'AzureFileStorageLocation', 'FileServerLocation': 'FileServerLocation', 'AmazonS3Location': 'AmazonS3Location', 'AzureDataLakeStoreLocation': 'AzureDataLakeStoreLocation', 'AzureBlobFSLocation': 'AzureBlobFSLocation', 'AzureBlobStorageLocation': 'AzureBlobStorageLocation'} - } - - def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, **kwargs) -> None: - super(DatasetLocation, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.folder_path = folder_path - self.file_name = file_name - self.type = None - - class AmazonS3Location(DatasetLocation): """The location of amazon S3 dataset. @@ -1335,52 +1583,8 @@ def __init__(self, *, additional_properties=None, folder_path=None, file_name=No self.type = 'AmazonS3Location' -class StoreReadSettings(Model): - """Connector read setting. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: HdfsReadSettings, HttpReadSettings, SftpReadSettings, - FtpReadSettings, GoogleCloudStorageReadSettings, - AzureFileStorageReadSettings, FileServerReadSettings, AmazonS3ReadSettings, - AzureDataLakeStoreReadSettings, AzureBlobFSReadSettings, - AzureBlobStorageReadSettings - - All required parameters must be populated in order to send to Azure. - - :param additional_properties: Unmatched properties from the message are - deserialized this collection - :type additional_properties: dict[str, object] - :param max_concurrent_connections: The maximum concurrent connection count - for the source data store. Type: integer (or Expression with resultType - integer). - :type max_concurrent_connections: object - :param type: Required. Constant filled by server. 
- :type type: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'HdfsReadSettings': 'HdfsReadSettings', 'HttpReadSettings': 'HttpReadSettings', 'SftpReadSettings': 'SftpReadSettings', 'FtpReadSettings': 'FtpReadSettings', 'GoogleCloudStorageReadSettings': 'GoogleCloudStorageReadSettings', 'AzureFileStorageReadSettings': 'AzureFileStorageReadSettings', 'FileServerReadSettings': 'FileServerReadSettings', 'AmazonS3ReadSettings': 'AmazonS3ReadSettings', 'AzureDataLakeStoreReadSettings': 'AzureDataLakeStoreReadSettings', 'AzureBlobFSReadSettings': 'AzureBlobFSReadSettings', 'AzureBlobStorageReadSettings': 'AzureBlobStorageReadSettings'} - } - - def __init__(self, *, additional_properties=None, max_concurrent_connections=None, **kwargs) -> None: - super(StoreReadSettings, self).__init__(**kwargs) - self.additional_properties = additional_properties - self.max_concurrent_connections = max_concurrent_connections - self.type = None - - class AmazonS3ReadSettings(StoreReadSettings): - """Azure data lake store read settings. + """Amazon S3 read settings. All required parameters must be populated in order to send to Azure. @@ -1561,6 +1765,29 @@ def __init__(self, *, name: str, additional_properties=None, description: str=No self.type = 'AppendVariable' +class ArmIdWrapper(Model): + """A wrapper for an ARM resource id. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: + :vartype id: str + """ + + _validation = { + 'id': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + } + + def __init__(self, **kwargs) -> None: + super(ArmIdWrapper, self).__init__(**kwargs) + self.id = None + + class AvroDataset(Dataset): """Avro dataset. @@ -1715,11 +1942,12 @@ class CopySink(Model): """A copy activity sink. You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: CosmosDbMongoDbApiSink, SalesforceServiceCloudSink, - SalesforceSink, AzureDataExplorerSink, CommonDataServiceForAppsSink, - DynamicsCrmSink, DynamicsSink, MicrosoftAccessSink, InformixSink, OdbcSink, - AzureSearchIndexSink, AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, - SnowflakeSink, SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, + sub-classes are: CosmosDbMongoDbApiSink, MongoDbV2Sink, MongoDbAtlasSink, + SalesforceServiceCloudSink, SalesforceSink, AzureDataExplorerSink, + CommonDataServiceForAppsSink, DynamicsCrmSink, DynamicsSink, + MicrosoftAccessSink, InformixSink, OdbcSink, AzureSearchIndexSink, + AzureBlobFSSink, AzureDataLakeStoreSink, OracleSink, SnowflakeSink, + SqlDWSink, SqlMISink, AzureSqlSink, SqlServerSink, SqlSink, CosmosDbSqlApiSink, DocumentDbCollectionSink, FileSystemSink, BlobSink, BinarySink, ParquetSink, AvroSink, AzureTableSink, AzureQueueSink, SapCloudForCustomerSink, AzureDatabricksDeltaLakeSink, AzureMySqlSink, @@ -1767,7 +1995,7 @@ class CopySink(Model): } _subtype_map = { - 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} + 'type': {'CosmosDbMongoDbApiSink': 'CosmosDbMongoDbApiSink', 'MongoDbV2Sink': 'MongoDbV2Sink', 'MongoDbAtlasSink': 'MongoDbAtlasSink', 'SalesforceServiceCloudSink': 'SalesforceServiceCloudSink', 'SalesforceSink': 'SalesforceSink', 'AzureDataExplorerSink': 'AzureDataExplorerSink', 'CommonDataServiceForAppsSink': 'CommonDataServiceForAppsSink', 'DynamicsCrmSink': 'DynamicsCrmSink', 'DynamicsSink': 'DynamicsSink', 'MicrosoftAccessSink': 'MicrosoftAccessSink', 'InformixSink': 'InformixSink', 'OdbcSink': 'OdbcSink', 'AzureSearchIndexSink': 'AzureSearchIndexSink', 'AzureBlobFSSink': 'AzureBlobFSSink', 'AzureDataLakeStoreSink': 'AzureDataLakeStoreSink', 'OracleSink': 'OracleSink', 'SnowflakeSink': 'SnowflakeSink', 'SqlDWSink': 'SqlDWSink', 'SqlMISink': 'SqlMISink', 'AzureSqlSink': 'AzureSqlSink', 'SqlServerSink': 'SqlServerSink', 'SqlSink': 'SqlSink', 'CosmosDbSqlApiSink': 'CosmosDbSqlApiSink', 'DocumentDbCollectionSink': 'DocumentDbCollectionSink', 'FileSystemSink': 'FileSystemSink', 'BlobSink': 'BlobSink', 'BinarySink': 'BinarySink', 'ParquetSink': 'ParquetSink', 
'AvroSink': 'AvroSink', 'AzureTableSink': 'AzureTableSink', 'AzureQueueSink': 'AzureQueueSink', 'SapCloudForCustomerSink': 'SapCloudForCustomerSink', 'AzureDatabricksDeltaLakeSink': 'AzureDatabricksDeltaLakeSink', 'AzureMySqlSink': 'AzureMySqlSink', 'AzurePostgreSqlSink': 'AzurePostgreSqlSink', 'RestSink': 'RestSink', 'OrcSink': 'OrcSink', 'JsonSink': 'JsonSink', 'DelimitedTextSink': 'DelimitedTextSink'} } def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, **kwargs) -> None: @@ -2716,6 +2944,11 @@ class AzureBlobStorageLinkedService(LinkedService): AzureUsGovernment, AzureGermany. Default value is the data factory regions’ cloud type. Type: string (or Expression with resultType string). :type azure_cloud_type: object + :param account_kind: Specify the kind of your storage account. Allowed + values are: Storage (general purpose v1), StorageV2 (general purpose v2), + BlobStorage, or BlockBlobStorage. Type: string (or Expression with + resultType string). + :type account_kind: str :param encrypted_credential: The encrypted credential used for authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). @@ -2742,10 +2975,11 @@ class AzureBlobStorageLinkedService(LinkedService): 'service_principal_key': {'key': 'typeProperties.servicePrincipalKey', 'type': 'SecretBase'}, 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, + 'account_kind': {'key': 'typeProperties.accountKind', 'type': 'str'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'str'}, } - def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential: str=None, **kwargs) -> None: + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, connection_string=None, account_key=None, sas_uri=None, sas_token=None, service_endpoint: str=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, account_kind: str=None, encrypted_credential: str=None, **kwargs) -> None: super(AzureBlobStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.account_key = account_key @@ -2756,6 +2990,7 @@ def __init__(self, *, additional_properties=None, connect_via=None, description: self.service_principal_key = service_principal_key self.tenant = tenant self.azure_cloud_type = azure_cloud_type + self.account_kind = account_kind self.encrypted_credential = encrypted_credential self.type = 'AzureBlobStorage' @@ -3611,28 +3846,25 @@ class AzureDataExplorerLinkedService(LinkedService): https://..kusto.windows.net. Type: string (or Expression with resultType string) :type endpoint: object - :param service_principal_id: Required. The ID of the service principal - used to authenticate against Azure Data Explorer. Type: string (or - Expression with resultType string). 
+ :param service_principal_id: The ID of the service principal used to + authenticate against Azure Data Explorer. Type: string (or Expression with + resultType string). :type service_principal_id: object - :param service_principal_key: Required. The key of the service principal - used to authenticate against Kusto. + :param service_principal_key: The key of the service principal used to + authenticate against Kusto. :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase :param database: Required. Database name for connection. Type: string (or Expression with resultType string). :type database: object - :param tenant: Required. The name or ID of the tenant to which the service - principal belongs. Type: string (or Expression with resultType string). + :param tenant: The name or ID of the tenant to which the service principal + belongs. Type: string (or Expression with resultType string). :type tenant: object """ _validation = { 'type': {'required': True}, 'endpoint': {'required': True}, - 'service_principal_id': {'required': True}, - 'service_principal_key': {'required': True}, 'database': {'required': True}, - 'tenant': {'required': True}, } _attribute_map = { @@ -3649,7 +3881,7 @@ class AzureDataExplorerLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, } - def __init__(self, *, endpoint, service_principal_id, service_principal_key, database, tenant, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + def __init__(self, *, endpoint, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, service_principal_id=None, service_principal_key=None, tenant=None, **kwargs) -> None: super(AzureDataExplorerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.endpoint = endpoint self.service_principal_id = service_principal_id @@ -6218,6 +6450,9 @@ class AzureSqlDatabaseLinkedService(LinkedService): authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. 
+ :type always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -6239,9 +6474,10 @@ class AzureSqlDatabaseLinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential=None, always_encrypted_settings=None, **kwargs) -> None: super(AzureSqlDatabaseLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.password = password @@ -6250,6 +6486,7 @@ def __init__(self, *, connection_string, additional_properties=None, connect_via self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings self.type = 'AzureSqlDatabase' @@ -6453,6 +6690,9 @@ class AzureSqlMILinkedService(LinkedService): authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. 
+ :type always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -6474,9 +6714,10 @@ class AzureSqlMILinkedService(LinkedService): 'tenant': {'key': 'typeProperties.tenant', 'type': 'object'}, 'azure_cloud_type': {'key': 'typeProperties.azureCloudType', 'type': 'object'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, password=None, service_principal_id=None, service_principal_key=None, tenant=None, azure_cloud_type=None, encrypted_credential=None, always_encrypted_settings=None, **kwargs) -> None: super(AzureSqlMILinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.password = password @@ -6485,6 +6726,7 @@ def __init__(self, *, connection_string, additional_properties=None, connect_via self.tenant = tenant self.azure_cloud_type = azure_cloud_type self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings self.type = 'AzureSqlMI' @@ -21726,15 +21968,71 @@ class MongoDbAtlasLinkedService(LinkedService): 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, 'annotations': {'key': 'annotations', 'type': '[object]'}, 'type': {'key': 'type', 'type': 'str'}, - 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, - 'database': {'key': 'typeProperties.database', 'type': 'object'}, + 'connection_string': {'key': 'typeProperties.connectionString', 'type': 'object'}, + 'database': {'key': 'typeProperties.database', 'type': 'object'}, + } + + def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: + super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.connection_string = connection_string + self.database = database + self.type = 'MongoDbAtlas' + + +class MongoDbAtlasSink(CopySink): + """A copy activity MongoDB Atlas sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). 
+ :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait. Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether the document with the same key + should be overwritten (upsert) rather than throw an exception (insert). + The default value is "insert". Type: string (or Expression with + resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, } - def __init__(self, *, connection_string, database, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, **kwargs) -> None: - super(MongoDbAtlasLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) - self.connection_string = connection_string - self.database = database - self.type = 'MongoDbAtlas' + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(MongoDbAtlasSink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'MongoDbAtlasSink' class MongoDbAtlasSource(CopySource): @@ -22175,6 +22473,62 @@ def __init__(self, *, connection_string, database, additional_properties=None, c self.type = 'MongoDbV2' +class MongoDbV2Sink(CopySink): + """A copy activity MongoDB sink. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param write_batch_size: Write batch size. Type: integer (or Expression + with resultType integer), minimum: 0. + :type write_batch_size: object + :param write_batch_timeout: Write batch timeout. Type: string (or + Expression with resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type write_batch_timeout: object + :param sink_retry_count: Sink retry count. Type: integer (or Expression + with resultType integer). + :type sink_retry_count: object + :param sink_retry_wait: Sink retry wait.
Type: string (or Expression with + resultType string), pattern: + ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type sink_retry_wait: object + :param max_concurrent_connections: The maximum concurrent connection count + for the sink data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param write_behavior: Specifies whether the document with the same key + should be overwritten (upsert) rather than throw an exception (insert). + The default value is "insert". Type: string (or Expression with + resultType string). + :type write_behavior: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'write_batch_size': {'key': 'writeBatchSize', 'type': 'object'}, + 'write_batch_timeout': {'key': 'writeBatchTimeout', 'type': 'object'}, + 'sink_retry_count': {'key': 'sinkRetryCount', 'type': 'object'}, + 'sink_retry_wait': {'key': 'sinkRetryWait', 'type': 'object'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'write_behavior': {'key': 'writeBehavior', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, write_batch_size=None, write_batch_timeout=None, sink_retry_count=None, sink_retry_wait=None, max_concurrent_connections=None, write_behavior=None, **kwargs) -> None: + super(MongoDbV2Sink, self).__init__(additional_properties=additional_properties, write_batch_size=write_batch_size, write_batch_timeout=write_batch_timeout, sink_retry_count=sink_retry_count, sink_retry_wait=sink_retry_wait, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.write_behavior = write_behavior + self.type = 'MongoDbV2Sink' + + class MongoDbV2Source(CopySource): """A copy activity source for a MongoDB database.
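
The two hunks above register MongoDbAtlasSink and MongoDbV2Sink as CopySink subtypes whose only extra knob is write_behavior. A minimal construction sketch (not part of the generated patch; the batch size and the copy-activity wiring in the comment are illustrative assumptions):

from azure.mgmt.datafactory.models import MongoDbAtlasSink, MongoDbV2Sink

# Upsert documents that share a key instead of failing the insert.
atlas_sink = MongoDbAtlasSink(write_behavior='upsert', write_batch_size=1000)

# MongoDbV2Sink exposes the identical surface for plain MongoDB v2 stores.
v2_sink = MongoDbV2Sink(write_behavior='insert')

# Either object is then passed as the `sink` of a copy activity, e.g.
# CopyActivity(name='CopyToAtlas', source=..., sink=atlas_sink).
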
@@ -23506,53 +23860,248 @@ class OperationMetricSpecification(Model): list[~azure.mgmt.datafactory.models.OperationMetricDimension] """ - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'display_description': {'key': 'displayDescription', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, - 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, - 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, - 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, - 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, - 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'display_name': {'key': 'displayName', 'type': 'str'}, + 'display_description': {'key': 'displayDescription', 'type': 'str'}, + 'unit': {'key': 'unit', 'type': 'str'}, + 'aggregation_type': {'key': 'aggregationType', 'type': 'str'}, + 'enable_regional_mdm_account': {'key': 'enableRegionalMdmAccount', 'type': 'str'}, + 'source_mdm_account': {'key': 'sourceMdmAccount', 'type': 'str'}, + 'source_mdm_namespace': {'key': 'sourceMdmNamespace', 'type': 'str'}, + 'availabilities': {'key': 'availabilities', 'type': '[OperationMetricAvailability]'}, + 'dimensions': {'key': 'dimensions', 'type': '[OperationMetricDimension]'}, + } + + def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None: + super(OperationMetricSpecification, self).__init__(**kwargs) + self.name = name + self.display_name = display_name + self.display_description = display_description + self.unit = unit + self.aggregation_type = aggregation_type + self.enable_regional_mdm_account = enable_regional_mdm_account + self.source_mdm_account = source_mdm_account + self.source_mdm_namespace = source_mdm_namespace + self.availabilities = availabilities + self.dimensions = dimensions + + +class OperationServiceSpecification(Model): + """Details about a service operation. + + :param log_specifications: Details about operations related to logs. + :type log_specifications: + list[~azure.mgmt.datafactory.models.OperationLogSpecification] + :param metric_specifications: Details about operations related to metrics. + :type metric_specifications: + list[~azure.mgmt.datafactory.models.OperationMetricSpecification] + """ + + _attribute_map = { + 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, + 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + } + + def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: + super(OperationServiceSpecification, self).__init__(**kwargs) + self.log_specifications = log_specifications + self.metric_specifications = metric_specifications + + +class OracleCloudStorageLinkedService(LinkedService): + """Linked service for Oracle Cloud Storage. + + All required parameters must be populated in order to send to Azure. 
+ + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param connect_via: The integration runtime reference. + :type connect_via: + ~azure.mgmt.datafactory.models.IntegrationRuntimeReference + :param description: Linked service description. + :type description: str + :param parameters: Parameters for linked service. + :type parameters: dict[str, + ~azure.mgmt.datafactory.models.ParameterSpecification] + :param annotations: List of tags that can be used for describing the + linked service. + :type annotations: list[object] + :param type: Required. Constant filled by server. + :type type: str + :param access_key_id: The access key identifier of the Oracle Cloud + Storage Identity and Access Management (IAM) user. Type: string (or + Expression with resultType string). + :type access_key_id: object + :param secret_access_key: The secret access key of the Oracle Cloud + Storage Identity and Access Management (IAM) user. + :type secret_access_key: ~azure.mgmt.datafactory.models.SecretBase + :param service_url: This value specifies the endpoint to access with the + Oracle Cloud Storage Connector. This is an optional property; change it + only if you want to try a different service endpoint or want to switch + between https and http. Type: string (or Expression with resultType + string). + :type service_url: object + :param encrypted_credential: The encrypted credential used for + authentication. Credentials are encrypted using the integration runtime + credential manager. Type: string (or Expression with resultType string). + :type encrypted_credential: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'connect_via': {'key': 'connectVia', 'type': 'IntegrationRuntimeReference'}, + 'description': {'key': 'description', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'access_key_id': {'key': 'typeProperties.accessKeyId', 'type': 'object'}, + 'secret_access_key': {'key': 'typeProperties.secretAccessKey', 'type': 'SecretBase'}, + 'service_url': {'key': 'typeProperties.serviceUrl', 'type': 'object'}, + 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, access_key_id=None, secret_access_key=None, service_url=None, encrypted_credential=None, **kwargs) -> None: + super(OracleCloudStorageLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) + self.access_key_id = access_key_id + self.secret_access_key = secret_access_key + self.service_url = service_url + self.encrypted_credential = encrypted_credential + self.type = 'OracleCloudStorage' + + +class OracleCloudStorageLocation(DatasetLocation): + """The location of Oracle Cloud Storage dataset. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param folder_path: Specify the folder path of dataset. 
Type: string (or + Expression with resultType string) + :type folder_path: object + :param file_name: Specify the file name of dataset. Type: string (or + Expression with resultType string). + :type file_name: object + :param type: Required. Constant filled by server. + :type type: str + :param bucket_name: Specify the bucketName of Oracle Cloud Storage. Type: + string (or Expression with resultType string) + :type bucket_name: object + :param version: Specify the version of Oracle Cloud Storage. Type: string + (or Expression with resultType string). + :type version: object + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'folder_path': {'key': 'folderPath', 'type': 'object'}, + 'file_name': {'key': 'fileName', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'bucket_name': {'key': 'bucketName', 'type': 'object'}, + 'version': {'key': 'version', 'type': 'object'}, + } + + def __init__(self, *, additional_properties=None, folder_path=None, file_name=None, bucket_name=None, version=None, **kwargs) -> None: + super(OracleCloudStorageLocation, self).__init__(additional_properties=additional_properties, folder_path=folder_path, file_name=file_name, **kwargs) + self.bucket_name = bucket_name + self.version = version + self.type = 'OracleCloudStorageLocation' + + +class OracleCloudStorageReadSettings(StoreReadSettings): + """Oracle Cloud Storage read settings. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are + deserialized this collection + :type additional_properties: dict[str, object] + :param max_concurrent_connections: The maximum concurrent connection count + for the source data store. Type: integer (or Expression with resultType + integer). + :type max_concurrent_connections: object + :param type: Required. Constant filled by server. + :type type: str + :param recursive: If true, files under the folder path will be read + recursively. Default is true. Type: boolean (or Expression with resultType + boolean). + :type recursive: object + :param wildcard_folder_path: Oracle Cloud Storage wildcardFolderPath. + Type: string (or Expression with resultType string). + :type wildcard_folder_path: object + :param wildcard_file_name: Oracle Cloud Storage wildcardFileName. Type: + string (or Expression with resultType string). + :type wildcard_file_name: object + :param prefix: The prefix filter for the Oracle Cloud Storage object name. + Type: string (or Expression with resultType string). + :type prefix: object + :param file_list_path: Point to a text file that lists each file (relative + path to the path configured in the dataset) that you want to copy. Type: + string (or Expression with resultType string). + :type file_list_path: object + :param enable_partition_discovery: Indicates whether to enable partition + discovery. + :type enable_partition_discovery: bool + :param partition_root_path: Specify the root path where partition + discovery starts from. Type: string (or Expression with resultType + string). + :type partition_root_path: object + :param delete_files_after_completion: Indicates whether the source files + need to be deleted after copy completion. Default is false. Type: boolean + (or Expression with resultType boolean). + :type delete_files_after_completion: object + :param modified_datetime_start: The start of file's modified datetime. 
+ Type: string (or Expression with resultType string). + :type modified_datetime_start: object + :param modified_datetime_end: The end of file's modified datetime. Type: + string (or Expression with resultType string). + :type modified_datetime_end: object + """ + + _validation = { + 'type': {'required': True}, } - def __init__(self, *, name: str=None, display_name: str=None, display_description: str=None, unit: str=None, aggregation_type: str=None, enable_regional_mdm_account: str=None, source_mdm_account: str=None, source_mdm_namespace: str=None, availabilities=None, dimensions=None, **kwargs) -> None: - super(OperationMetricSpecification, self).__init__(**kwargs) - self.name = name - self.display_name = display_name - self.display_description = display_description - self.unit = unit - self.aggregation_type = aggregation_type - self.enable_regional_mdm_account = enable_regional_mdm_account - self.source_mdm_account = source_mdm_account - self.source_mdm_namespace = source_mdm_namespace - self.availabilities = availabilities - self.dimensions = dimensions - - -class OperationServiceSpecification(Model): - """Details about a service operation. - - :param log_specifications: Details about operations related to logs. - :type log_specifications: - list[~azure.mgmt.datafactory.models.OperationLogSpecification] - :param metric_specifications: Details about operations related to metrics. - :type metric_specifications: - list[~azure.mgmt.datafactory.models.OperationMetricSpecification] - """ - _attribute_map = { - 'log_specifications': {'key': 'logSpecifications', 'type': '[OperationLogSpecification]'}, - 'metric_specifications': {'key': 'metricSpecifications', 'type': '[OperationMetricSpecification]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'max_concurrent_connections': {'key': 'maxConcurrentConnections', 'type': 'object'}, + 'type': {'key': 'type', 'type': 'str'}, + 'recursive': {'key': 'recursive', 'type': 'object'}, + 'wildcard_folder_path': {'key': 'wildcardFolderPath', 'type': 'object'}, + 'wildcard_file_name': {'key': 'wildcardFileName', 'type': 'object'}, + 'prefix': {'key': 'prefix', 'type': 'object'}, + 'file_list_path': {'key': 'fileListPath', 'type': 'object'}, + 'enable_partition_discovery': {'key': 'enablePartitionDiscovery', 'type': 'bool'}, + 'partition_root_path': {'key': 'partitionRootPath', 'type': 'object'}, + 'delete_files_after_completion': {'key': 'deleteFilesAfterCompletion', 'type': 'object'}, + 'modified_datetime_start': {'key': 'modifiedDatetimeStart', 'type': 'object'}, + 'modified_datetime_end': {'key': 'modifiedDatetimeEnd', 'type': 'object'}, } - def __init__(self, *, log_specifications=None, metric_specifications=None, **kwargs) -> None: - super(OperationServiceSpecification, self).__init__(**kwargs) - self.log_specifications = log_specifications - self.metric_specifications = metric_specifications + def __init__(self, *, additional_properties=None, max_concurrent_connections=None, recursive=None, wildcard_folder_path=None, wildcard_file_name=None, prefix=None, file_list_path=None, enable_partition_discovery: bool=None, partition_root_path=None, delete_files_after_completion=None, modified_datetime_start=None, modified_datetime_end=None, **kwargs) -> None: + super(OracleCloudStorageReadSettings, self).__init__(additional_properties=additional_properties, max_concurrent_connections=max_concurrent_connections, **kwargs) + self.recursive = recursive + self.wildcard_folder_path = wildcard_folder_path + self.wildcard_file_name = wildcard_file_name 
+ self.prefix = prefix + self.file_list_path = file_list_path + self.enable_partition_discovery = enable_partition_discovery + self.partition_root_path = partition_root_path + self.delete_files_after_completion = delete_files_after_completion + self.modified_datetime_start = modified_datetime_start + self.modified_datetime_end = modified_datetime_end + self.type = 'OracleCloudStorageReadSettings' class OracleLinkedService(LinkedService): @@ -25810,6 +26359,219 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'PrestoSource' +class PrivateEndpointConnectionResource(SubResource): + """Private Endpoint Connection ARM resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties + :type properties: + ~azure.mgmt.datafactory.models.RemotePrivateEndpointConnection + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'RemotePrivateEndpointConnection'}, + } + + def __init__(self, *, properties=None, **kwargs) -> None: + super(PrivateEndpointConnectionResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkConnectionApprovalRequest(Model): + """A request to approve or reject a private endpoint connection. + + :param private_link_service_connection_state: + :type private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ + + _attribute_map = { + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__(self, *, private_link_service_connection_state=None, **kwargs) -> None: + super(PrivateLinkConnectionApprovalRequest, self).__init__(**kwargs) + self.private_link_service_connection_state = private_link_service_connection_state + + +class PrivateLinkConnectionApprovalRequestResource(SubResource): + """Private Endpoint Connection Approval ARM resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. 
+ :vartype etag: str + :param properties: Core resource properties + :type properties: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkConnectionApprovalRequest'}, + } + + def __init__(self, *, properties=None, **kwargs) -> None: + super(PrivateLinkConnectionApprovalRequestResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkConnectionState(Model): + """The state of a private link connection. + + :param status: Status of a private link connection + :type status: str + :param description: Description of a private link connection + :type description: str + :param actions_required: ActionsRequired for a private link connection + :type actions_required: str + """ + + _attribute_map = { + 'status': {'key': 'status', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, + } + + def __init__(self, *, status: str=None, description: str=None, actions_required: str=None, **kwargs) -> None: + super(PrivateLinkConnectionState, self).__init__(**kwargs) + self.status = status + self.description = description + self.actions_required = actions_required + + +class PrivateLinkResource(SubResource): + """A private link resource. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Core resource properties + :type properties: + ~azure.mgmt.datafactory.models.PrivateLinkResourceProperties + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'PrivateLinkResourceProperties'}, + } + + def __init__(self, *, properties=None, **kwargs) -> None: + super(PrivateLinkResource, self).__init__(**kwargs) + self.properties = properties + + +class PrivateLinkResourceProperties(Model): + """Properties of a private link resource. + + Variables are only populated by the server, and will be ignored when + sending a request. 
+ + :ivar group_id: GroupId of a private link resource + :vartype group_id: str + :ivar required_members: RequiredMembers of a private link resource + :vartype required_members: list[str] + :ivar required_zone_names: RequiredZoneNames of a private link resource + :vartype required_zone_names: list[str] + """ + + _validation = { + 'group_id': {'readonly': True}, + 'required_members': {'readonly': True}, + 'required_zone_names': {'readonly': True}, + } + + _attribute_map = { + 'group_id': {'key': 'groupId', 'type': 'str'}, + 'required_members': {'key': 'requiredMembers', 'type': '[str]'}, + 'required_zone_names': {'key': 'requiredZoneNames', 'type': '[str]'}, + } + + def __init__(self, **kwargs) -> None: + super(PrivateLinkResourceProperties, self).__init__(**kwargs) + self.group_id = None + self.required_members = None + self.required_zone_names = None + + +class PrivateLinkResourcesWrapper(Model): + """Wrapper for a collection of private link resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. + :type value: list[~azure.mgmt.datafactory.models.PrivateLinkResource] + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + } + + def __init__(self, *, value, **kwargs) -> None: + super(PrivateLinkResourcesWrapper, self).__init__(**kwargs) + self.value = value + + class QuickBooksLinkedService(LinkedService): """QuickBooks server linked service. @@ -26257,6 +27019,39 @@ def __init__(self, *, linked_service_name, additional_properties=None, descripti self.type = 'RelationalTable' +class RemotePrivateEndpointConnection(Model): + """A remote private endpoint connection. + + Variables are only populated by the server, and will be ignored when + sending a request. + + :ivar provisioning_state: + :vartype provisioning_state: str + :param private_endpoint: PrivateEndpoint of a remote private endpoint + connection + :type private_endpoint: ~azure.mgmt.datafactory.models.ArmIdWrapper + :param private_link_service_connection_state: + :type private_link_service_connection_state: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionState + """ + + _validation = { + 'provisioning_state': {'readonly': True}, + } + + _attribute_map = { + 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + 'private_endpoint': {'key': 'privateEndpoint', 'type': 'ArmIdWrapper'}, + 'private_link_service_connection_state': {'key': 'privateLinkServiceConnectionState', 'type': 'PrivateLinkConnectionState'}, + } + + def __init__(self, *, private_endpoint=None, private_link_service_connection_state=None, **kwargs) -> None: + super(RemotePrivateEndpointConnection, self).__init__(**kwargs) + self.provisioning_state = None + self.private_endpoint = private_endpoint + self.private_link_service_connection_state = private_link_service_connection_state + + class RerunTumblingWindowTrigger(Trigger): """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. @@ -31167,6 +31962,42 @@ def __init__(self, *, additional_properties=None, source_retry_count=None, sourc self.type = 'SparkSource' +class SqlAlwaysEncryptedProperties(Model): + """Sql always encrypted properties. + + All required parameters must be populated in order to send to Azure. + + :param always_encrypted_akv_auth_type: Required. Sql always encrypted AKV + authentication type. Type: string (or Expression with resultType string). 
+ Possible values include: 'ServicePrincipal', 'ManagedIdentity' + :type always_encrypted_akv_auth_type: str or + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedAkvAuthType + :param service_principal_id: The client ID of the application in Azure + Active Directory used for Azure Key Vault authentication. Type: string (or + Expression with resultType string). + :type service_principal_id: object + :param service_principal_key: The key of the service principal used to + authenticate against Azure Key Vault. + :type service_principal_key: ~azure.mgmt.datafactory.models.SecretBase + """ + + _validation = { + 'always_encrypted_akv_auth_type': {'required': True}, + } + + _attribute_map = { + 'always_encrypted_akv_auth_type': {'key': 'alwaysEncryptedAkvAuthType', 'type': 'str'}, + 'service_principal_id': {'key': 'servicePrincipalId', 'type': 'object'}, + 'service_principal_key': {'key': 'servicePrincipalKey', 'type': 'SecretBase'}, + } + + def __init__(self, *, always_encrypted_akv_auth_type, service_principal_id=None, service_principal_key=None, **kwargs) -> None: + super(SqlAlwaysEncryptedProperties, self).__init__(**kwargs) + self.always_encrypted_akv_auth_type = always_encrypted_akv_auth_type + self.service_principal_id = service_principal_id + self.service_principal_key = service_principal_key + + class SqlDWSink(CopySink): """A copy activity SQL Data Warehouse sink. @@ -31563,6 +32394,9 @@ class SqlServerLinkedService(LinkedService): authentication. Credentials are encrypted using the integration runtime credential manager. Type: string (or Expression with resultType string). :type encrypted_credential: object + :param always_encrypted_settings: Sql always encrypted properties. + :type always_encrypted_settings: + ~azure.mgmt.datafactory.models.SqlAlwaysEncryptedProperties """ _validation = { @@ -31581,14 +32415,16 @@ class SqlServerLinkedService(LinkedService): 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, 'password': {'key': 'typeProperties.password', 'type': 'SecretBase'}, 'encrypted_credential': {'key': 'typeProperties.encryptedCredential', 'type': 'object'}, + 'always_encrypted_settings': {'key': 'typeProperties.alwaysEncryptedSettings', 'type': 'SqlAlwaysEncryptedProperties'}, } - def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, **kwargs) -> None: + def __init__(self, *, connection_string, additional_properties=None, connect_via=None, description: str=None, parameters=None, annotations=None, user_name=None, password=None, encrypted_credential=None, always_encrypted_settings=None, **kwargs) -> None: super(SqlServerLinkedService, self).__init__(additional_properties=additional_properties, connect_via=connect_via, description=description, parameters=parameters, annotations=annotations, **kwargs) self.connection_string = connection_string self.user_name = user_name self.password = password self.encrypted_credential = encrypted_credential + self.always_encrypted_settings = always_encrypted_settings self.type = 'SqlServer' diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py index dd21c5c251e7..afe2589aeb25 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/_paged_models.py @@ -155,3 +155,16 @@ class ManagedPrivateEndpointResourcePaged(Paged): def __init__(self, *args, **kwargs): super(ManagedPrivateEndpointResourcePaged, self).__init__(*args, **kwargs) +class PrivateEndpointConnectionResourcePaged(Paged): + """ + A paging container for iterating over a list of :class:`PrivateEndpointConnectionResource <azure.mgmt.datafactory.models.PrivateEndpointConnectionResource>` object + """ + + _attribute_map = { + 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'current_page': {'key': 'value', 'type': '[PrivateEndpointConnectionResource]'} + } + + def __init__(self, *args, **kwargs): + + super(PrivateEndpointConnectionResourcePaged, self).__init__(*args, **kwargs) diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py index 8895bb8bb30b..4325455ce980 100644 --- a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/__init__.py @@ -26,6 +26,9 @@ from ._data_flow_debug_session_operations import DataFlowDebugSessionOperations from ._managed_virtual_networks_operations import ManagedVirtualNetworksOperations from ._managed_private_endpoints_operations import ManagedPrivateEndpointsOperations +from ._private_end_point_connections_operations import PrivateEndPointConnectionsOperations +from ._private_endpoint_connection_operations import PrivateEndpointConnectionOperations +from ._private_link_resources_operations import PrivateLinkResourcesOperations __all__ = [ 'Operations', @@ -45,4 +48,7 @@ 'DataFlowDebugSessionOperations', 'ManagedVirtualNetworksOperations', 'ManagedPrivateEndpointsOperations', + 'PrivateEndPointConnectionsOperations', + 'PrivateEndpointConnectionOperations', + 'PrivateLinkResourcesOperations', ] diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py new file mode 100644 index 000000000000..37cdd6f47f43 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_end_point_connections_operations.py @@ -0,0 +1,113 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class PrivateEndPointConnectionsOperations(object): + """PrivateEndPointConnectionsOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01".
+ """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def list_by_factory( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Lists Private endpoint connections. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: An iterator like instance of + PrivateEndpointConnectionResource + :rtype: + ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResourcePaged[~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource] + :raises: :class:`CloudError` + """ + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + else: + url = next_link + query_parameters = {} + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def internal_paging(next_link=None): + request = prepare_request(next_link) + + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + return response + + # Deserialize response + header_dict = None + if raw: + header_dict = {} + deserialized = models.PrivateEndpointConnectionResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) + + return deserialized + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndPointConnections'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py new file mode 100644 index 000000000000..b185cd7e9f59 --- /dev/null +++ 
b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_endpoint_connection_operations.py @@ -0,0 +1,252 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class PrivateEndpointConnectionOperations(object): + """PrivateEndpointConnectionOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def create_or_update( + self, resource_group_name, factory_name, private_endpoint_connection_name, if_match=None, properties=None, custom_headers=None, raw=False, **operation_config): + """Approves or rejects a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint + connection name. + :type private_endpoint_connection_name: str + :param if_match: ETag of the private endpoint connection entity. + Should only be specified for update, for which it should match + existing entity or can be * for unconditional update. + :type if_match: str + :param properties: Core resource properties + :type properties: + ~azure.mgmt.datafactory.models.PrivateLinkConnectionApprovalRequest + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: PrivateEndpointConnectionResource or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + private_endpoint_wrapper = models.PrivateLinkConnectionApprovalRequestResource(properties=properties) + + # Construct URL + url = self.create_or_update.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + header_parameters['Content-Type'] = 'application/json; charset=utf-8' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct body + body_content = self._serialize.body(private_endpoint_wrapper, 'PrivateLinkConnectionApprovalRequestResource') + + # Construct and send request + request = self._client.put(url, query_parameters, header_parameters, body_content) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpointConnectionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} + + def get( + self, resource_group_name, factory_name, private_endpoint_connection_name, if_none_match=None, custom_headers=None, raw=False, **operation_config): + """Gets a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint + connection name. + :type private_endpoint_connection_name: str + :param if_none_match: ETag of the private endpoint connection entity. + Should only be specified for get. If the ETag matches the existing + entity tag, or if * was provided, then no content will be returned. 
+ :type if_none_match: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PrivateEndpointConnectionResource or ClientRawResponse if + raw=true + :rtype: + ~azure.mgmt.datafactory.models.PrivateEndpointConnectionResource or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PrivateEndpointConnectionResource', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} + + def delete( + self, resource_group_name, factory_name, private_endpoint_connection_name, custom_headers=None, raw=False, **operation_config): + """Deletes a private endpoint connection. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param private_endpoint_connection_name: The private endpoint + connection name. + :type private_endpoint_connection_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. 
+ :return: None or ClientRawResponse if raw=true + :rtype: None or ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.delete.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'privateEndpointConnectionName': self._serialize.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200, 204]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + if raw: + client_raw_response = ClientRawResponse(None, response) + return client_raw_response + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateEndpointConnections/{privateEndpointConnectionName}'} diff --git a/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py new file mode 100644 index 000000000000..9c50c2df5041 --- /dev/null +++ b/sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/operations/_private_link_resources_operations.py @@ -0,0 +1,101 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for +# license information. +# +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is +# regenerated. +# -------------------------------------------------------------------------- + +import uuid +from msrest.pipeline import ClientRawResponse +from msrestazure.azure_exceptions import CloudError + +from .. import models + + +class PrivateLinkResourcesOperations(object): + """PrivateLinkResourcesOperations operations. + + You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute. + + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. 
+ :ivar api_version: The API version. Constant value: "2018-06-01". + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self.api_version = "2018-06-01" + + self.config = config + + def get( + self, resource_group_name, factory_name, custom_headers=None, raw=False, **operation_config): + """Gets the private link resources. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param dict custom_headers: headers that will be added to the request + :param bool raw: returns the direct response alongside the + deserialized response + :param operation_config: :ref:`Operation configuration + overrides`. + :return: PrivateLinkResourcesWrapper or ClientRawResponse if raw=true + :rtype: ~azure.mgmt.datafactory.models.PrivateLinkResourcesWrapper or + ~msrest.pipeline.ClientRawResponse + :raises: :class:`CloudError` + """ + # Construct URL + url = self.get.metadata['url'] + path_format_arguments = { + 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$') + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} + query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') + + # Construct headers + header_parameters = {} + header_parameters['Accept'] = 'application/json' + if self.config.generate_client_request_id: + header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) + if custom_headers: + header_parameters.update(custom_headers) + if self.config.accept_language is not None: + header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + response = self._client.send(request, stream=False, **operation_config) + + if response.status_code not in [200]: + exp = CloudError(response) + exp.request_id = response.headers.get('x-ms-request-id') + raise exp + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('PrivateLinkResourcesWrapper', response) + + if raw: + client_raw_response = ClientRawResponse(deserialized, response) + return client_raw_response + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/privateLinkResources'}
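
The remainder of this patch lends itself to a few usage sketches. None of the snippets below are generated code; endpoints, resource names, and secrets are placeholders. First, the new accountKind property on the Blob Storage linked service:

from azure.mgmt.datafactory.models import AzureBlobStorageLinkedService

# account_kind distinguishes Storage (general purpose v1), StorageV2
# (general purpose v2), BlobStorage, and BlockBlobStorage accounts.
blob_ls = AzureBlobStorageLinkedService(
    service_endpoint='https://myaccount.blob.core.windows.net',  # placeholder
    account_kind='StorageV2',
)
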
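Next, the relaxed validation on AzureDataExplorerLinkedService: service_principal_id, service_principal_key, and tenant become optional keyword arguments, so a linked service can be declared without them (for example when another identity mechanism is used). A sketch under that assumption:

from azure.mgmt.datafactory.models import AzureDataExplorerLinkedService

# Only endpoint and database remain required after this patch.
adx_ls = AzureDataExplorerLinkedService(
    endpoint='https://mycluster.westus2.kusto.windows.net',  # placeholder
    database='mydb',                                         # placeholder
)
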
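The new SqlAlwaysEncryptedProperties model plugs into AzureSqlDatabaseLinkedService, AzureSqlMILinkedService, and SqlServerLinkedService through their alwaysEncryptedSettings property. A sketch with a placeholder connection string:

from azure.mgmt.datafactory.models import (
    AzureSqlDatabaseLinkedService,
    SqlAlwaysEncryptedProperties,
)

# Authenticate to Azure Key Vault with the factory's managed identity;
# 'ServicePrincipal' (plus id and key) is the other supported AKV auth type.
ae_settings = SqlAlwaysEncryptedProperties(
    always_encrypted_akv_auth_type='ManagedIdentity')

sql_ls = AzureSqlDatabaseLinkedService(
    connection_string='Server=tcp:example.database.windows.net;Database=db',  # placeholder
    always_encrypted_settings=ae_settings,
)
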
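The Oracle Cloud Storage trio (linked service, dataset location, read settings) mirrors the existing Amazon S3 shapes. A sketch in which the bucket, key, and datetime values are placeholders, and SecureString is assumed to be the SecretBase implementation already shipped in this package:

from azure.mgmt.datafactory.models import (
    OracleCloudStorageLinkedService,
    OracleCloudStorageLocation,
    OracleCloudStorageReadSettings,
    SecureString,  # assumed existing SecretBase subtype
)

oracle_ls = OracleCloudStorageLinkedService(
    access_key_id='my-access-key-id',                   # placeholder
    secret_access_key=SecureString(value='my-secret'),  # placeholder
)

location = OracleCloudStorageLocation(
    bucket_name='my-bucket', folder_path='raw')

# Recurse through the bucket, match CSVs, and only pick up files
# modified after the given instant.
read_settings = OracleCloudStorageReadSettings(
    recursive=True,
    wildcard_file_name='*.csv',
    modified_datetime_start='2021-04-01T00:00:00Z',
)
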
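Finally, the three new operation groups as they hang off DataFactoryManagementClient. Credential construction follows the usual track-1 pattern and is not part of this patch; all ids and names below are placeholders:

from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.datafactory import DataFactoryManagementClient
from azure.mgmt.datafactory.models import (
    PrivateLinkConnectionApprovalRequest,
    PrivateLinkConnectionState,
)

credentials = ServicePrincipalCredentials(
    client_id='app-id', secret='app-secret', tenant='tenant-id')  # placeholders
client = DataFactoryManagementClient(credentials, 'subscription-id')  # placeholder

# Discover which private link resources (group ids) the factory exposes.
for resource in client.private_link_resources.get('my-rg', 'my-factory').value:
    print(resource.properties.group_id, resource.properties.required_members)

# Enumerate the factory's private endpoint connections.
for connection in client.private_end_point_connections.list_by_factory(
        'my-rg', 'my-factory'):
    print(connection.name)

# Approve a specific connection by name.
approval = PrivateLinkConnectionApprovalRequest(
    private_link_service_connection_state=PrivateLinkConnectionState(
        status='Approved',
        description='Approved by the network admin',
        actions_required='None'))
client.private_endpoint_connection.create_or_update(
    'my-rg', 'my-factory', 'my-connection-name', properties=approval)

# ...and remove it again if required.
client.private_endpoint_connection.delete(
    'my-rg', 'my-factory', 'my-connection-name')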